diff --git a/dimos/agents/memory/test_image_embedding.py b/dimos/agents/memory/test_image_embedding.py
new file mode 100644
index 0000000000..bbcf96360a
--- /dev/null
+++ b/dimos/agents/memory/test_image_embedding.py
@@ -0,0 +1,224 @@
+# Copyright 2025 Dimensional Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Test module for the CLIP image embedding functionality in dimos.
+"""
+
+import os
+import time
+import numpy as np
+import pytest
+import reactivex as rx
+from reactivex import operators as ops
+from dimos.stream.video_provider import VideoProvider
+from dimos.agents.memory.image_embedding import ImageEmbeddingProvider
+
+
+class TestImageEmbedding:
+ """Test class for CLIP image embedding functionality."""
+
+ @pytest.fixture(scope="class")
+ def video_path(self):
+ """Return the path to the test video."""
+ # Use a video file from assets directory
+ base_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../assets"))
+ video_file = "trimmed_video_office.mov" # Use the same test video as YOLO test
+ video_path = os.path.join(base_dir, video_file)
+
+ # Fallback to any video file in assets directory if the specific one isn't found
+ if not os.path.exists(video_path):
+ for filename in os.listdir(base_dir):
+ if filename.endswith((".mp4", ".avi", ".mov")):
+ video_path = os.path.join(base_dir, filename)
+ break
+
+ return video_path
+
+ def test_clip_embedding_initialization(self):
+ """Test CLIP embedding provider initializes correctly."""
+ try:
+ # Initialize the embedding provider with CLIP model
+ embedding_provider = ImageEmbeddingProvider(model_name="clip", dimensions=512)
+ assert embedding_provider.model is not None, "CLIP model failed to initialize"
+ assert embedding_provider.processor is not None, "CLIP processor failed to initialize"
+ assert embedding_provider.model_name == "clip", "Model name should be 'clip'"
+ assert embedding_provider.dimensions == 512, "Embedding dimensions should be 512"
+ except Exception as e:
+ pytest.skip(f"Skipping test due to model initialization error: {e}")
+
+ def test_clip_embedding_process_video(self, video_path):
+ """Test CLIP embedding provider can process video frames and return embeddings."""
+ try:
+ # Initialize the embedding provider
+ embedding_provider = ImageEmbeddingProvider(model_name="clip", dimensions=512)
+
+ # Create video provider and get video stream observable
+ assert os.path.exists(video_path), f"Test video not found: {video_path}"
+ video_provider = VideoProvider(dev_name="test_video", video_source=video_path)
+
+ video_stream = video_provider.capture_video_as_observable(realtime=False, fps=15)
+
+ # Use ReactiveX operators to process the stream
+ def process_frame(frame):
+ try:
+ # Process frame with CLIP
+ embedding = embedding_provider.get_embedding(frame)
+ print(
+ f"Generated CLIP embedding with shape: {embedding.shape}, norm: {np.linalg.norm(embedding):.4f}"
+ )
+
+ return {"frame": frame, "embedding": embedding}
+ except Exception as e:
+ print(f"Error in process_frame: {e}")
+ return None
+
+ embedding_stream = video_stream.pipe(ops.map(process_frame))
+
+ results = []
+ frames_processed = 0
+ target_frames = 10
+
+ def on_next(result):
+ nonlocal frames_processed, results
+ if not result: # Skip None results
+ return
+
+ results.append(result)
+ frames_processed += 1
+
+ # Stop processing after target frames
+ if frames_processed >= target_frames:
+ subscription.dispose()
+
+ def on_error(error):
+ pytest.fail(f"Error in embedding stream: {error}")
+
+ def on_completed():
+ pass
+
+ # Subscribe and wait for results
+ subscription = embedding_stream.subscribe(
+ on_next=on_next, on_error=on_error, on_completed=on_completed
+ )
+
+ timeout = 60.0
+ start_time = time.time()
+ while frames_processed < target_frames and time.time() - start_time < timeout:
+ time.sleep(0.5)
+ print(f"Processed {frames_processed}/{target_frames} frames")
+
+ # Clean up subscription
+ subscription.dispose()
+ video_provider.dispose_all()
+
+ # Check if we have results
+ if len(results) == 0:
+ pytest.skip("No embeddings generated, but test connection established correctly")
+ return
+
+ print(f"Processed {len(results)} frames with CLIP embeddings")
+
+ # Analyze the results
+ assert len(results) > 0, "No embeddings generated"
+
+ # Check properties of first embedding
+ first_result = results[0]
+ assert "embedding" in first_result, "Result doesn't contain embedding"
+ assert "frame" in first_result, "Result doesn't contain frame"
+
+ # Check embedding shape and normalization
+ embedding = first_result["embedding"]
+ assert isinstance(embedding, np.ndarray), "Embedding is not a numpy array"
+ assert embedding.shape == (512,), (
+ f"Embedding has wrong shape: {embedding.shape}, expected (512,)"
+ )
+ assert abs(np.linalg.norm(embedding) - 1.0) < 1e-5, "Embedding is not normalized"
+
+ # Save the first embedding for similarity tests
+ if len(results) > 1 and "embedding" in results[0]:
+ # Create a class variable to store embeddings for the similarity test
+ TestImageEmbedding.test_embeddings = {
+ "embedding1": results[0]["embedding"],
+ "embedding2": results[1]["embedding"] if len(results) > 1 else None,
+ }
+ print(f"Saved embeddings for similarity testing")
+
+ print("CLIP embedding test passed successfully!")
+
+ except Exception as e:
+ pytest.fail(f"Test failed with error: {e}")
+
+ def test_clip_embedding_similarity(self):
+ """Test CLIP embedding similarity search and text-to-image queries."""
+ try:
+ # Skip if previous test didn't generate embeddings
+ if not hasattr(TestImageEmbedding, "test_embeddings"):
+ pytest.skip("No embeddings available from previous test")
+ return
+
+ # Get embeddings from previous test
+ embedding1 = TestImageEmbedding.test_embeddings["embedding1"]
+ embedding2 = TestImageEmbedding.test_embeddings["embedding2"]
+
+ # Initialize embedding provider for text embeddings
+ embedding_provider = ImageEmbeddingProvider(model_name="clip", dimensions=512)
+
+ # Test frame-to-frame similarity
+ if embedding1 is not None and embedding2 is not None:
+ # Compute cosine similarity
+ similarity = np.dot(embedding1, embedding2)
+ print(f"Similarity between first two frames: {similarity:.4f}")
+
+ # Should be in range [-1, 1]
+ assert -1.0 <= similarity <= 1.0, f"Similarity out of valid range: {similarity}"
+
+ # Test text-to-image similarity
+ if embedding1 is not None:
+ # Generate a list of text queries to test
+ text_queries = ["a video frame", "a person", "an outdoor scene", "a kitchen"]
+
+ # Test each text query
+ for text_query in text_queries:
+ # Get text embedding
+ text_embedding = embedding_provider.get_text_embedding(text_query)
+
+ # Check text embedding properties
+ assert isinstance(text_embedding, np.ndarray), (
+ "Text embedding is not a numpy array"
+ )
+ assert text_embedding.shape == (512,), (
+ f"Text embedding has wrong shape: {text_embedding.shape}"
+ )
+ assert abs(np.linalg.norm(text_embedding) - 1.0) < 1e-5, (
+ "Text embedding is not normalized"
+ )
+
+ # Compute similarity between frame and text
+ text_similarity = np.dot(embedding1, text_embedding)
+ print(f"Similarity between frame and '{text_query}': {text_similarity:.4f}")
+
+ # Should be in range [-1, 1]
+ assert -1.0 <= text_similarity <= 1.0, (
+ f"Text-image similarity out of range: {text_similarity}"
+ )
+
+ print("CLIP embedding similarity tests passed successfully!")
+
+ except Exception as e:
+ pytest.fail(f"Similarity test failed with error: {e}")
+
+
+if __name__ == "__main__":
+ pytest.main(["-v", "--disable-warnings", __file__])
diff --git a/dimos/models/manipulation/__init__.py b/dimos/models/manipulation/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/.gitattributes b/dimos/models/manipulation/contact_graspnet_pytorch/.gitattributes
new file mode 100644
index 0000000000..c9de20fc5d
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/.gitattributes
@@ -0,0 +1,2 @@
+test_data/** filter= diff= merge= -text
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/.gitignore b/dimos/models/manipulation/contact_graspnet_pytorch/.gitignore
new file mode 100644
index 0000000000..fcd6c96291
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/.gitignore
@@ -0,0 +1,14 @@
+*.pt
+results
+checkpoints/*
+!checkpoints/contact_graspnet/checkpoints/*.pt
+!checkpoints/contact_graspnet
+*.npz
+**/*.pyc
+**/__pycache__
+.vscode
+acronym/*
+!acronym/*.py
+
+*.egg-info
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/License.pdf b/dimos/models/manipulation/contact_graspnet_pytorch/License.pdf
new file mode 100644
index 0000000000..74d706826b
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/License.pdf differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/.gitattributes b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/.gitattributes
new file mode 100644
index 0000000000..dfe0770424
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/.gitattributes
@@ -0,0 +1,2 @@
+# Auto detect text files and perform LF normalization
+* text=auto
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/.gitignore b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/.gitignore
new file mode 100644
index 0000000000..510c73d0fd
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/.gitignore
@@ -0,0 +1,114 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# celery beat schedule file
+celerybeat-schedule
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/LICENSE b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/LICENSE
new file mode 100644
index 0000000000..cad006d1cc
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019 benny
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/README.md b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/README.md
new file mode 100644
index 0000000000..49ccb5846d
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/README.md
@@ -0,0 +1,183 @@
+# Pytorch Implementation of PointNet and PointNet++
+
+This repo is implementation for [PointNet](http://openaccess.thecvf.com/content_cvpr_2017/papers/Qi_PointNet_Deep_Learning_CVPR_2017_paper.pdf) and [PointNet++](http://papers.nips.cc/paper/7095-pointnet-deep-hierarchical-feature-learning-on-point-sets-in-a-metric-space.pdf) in pytorch.
+
+## Update
+**2021/03/27:**
+
+(1) Release pre-trained models for semantic segmentation, where PointNet++ can achieve **53.5\%** mIoU.
+
+(2) Release pre-trained models for classification and part segmentation in `log/`.
+
+**2021/03/20:** Update codes for classification, including:
+
+(1) Add codes for training **ModelNet10** dataset. Using setting of ``--num_category 10``.
+
+(2) Add codes for running on CPU only. Using setting of ``--use_cpu``.
+
+(3) Add codes for offline data preprocessing to accelerate training. Using setting of ``--process_data``.
+
+(4) Add codes for training with uniform sampling. Using setting of ``--use_uniform_sample``.
+
+**2019/11/26:**
+
+(1) Fixed some errors in previous codes and added data augmentation tricks. Now classification by only 1024 points can achieve **92.8\%**!
+
+(2) Added testing codes, including classification and segmentation, and semantic segmentation with visualization.
+
+(3) Organized all models into `./models` files for easy using.
+
+## Install
+The latest codes are tested on Ubuntu 16.04, CUDA10.1, PyTorch 1.6 and Python 3.7:
+```shell
+conda install pytorch==1.6.0 cudatoolkit=10.1 -c pytorch
+```
+
+## Classification (ModelNet10/40)
+### Data Preparation
+Download alignment **ModelNet** [here](https://shapenet.cs.stanford.edu/media/modelnet40_normal_resampled.zip) and save in `data/modelnet40_normal_resampled/`.
+
+### Run
+You can run different modes with following codes.
+* If you want to use offline processing of data, you can use `--process_data` in the first run. You can download pre-processd data [here](https://drive.google.com/drive/folders/1_fBYbDO3XSdRt3DSbEBe41r5l9YpIGWF?usp=sharing) and save it in `data/modelnet40_normal_resampled/`.
+* If you want to train on ModelNet10, you can use `--num_category 10`.
+```shell
+# ModelNet40
+## Select different models in ./models
+
+## e.g., pointnet2_ssg without normal features
+python train_classification.py --model pointnet2_cls_ssg --log_dir pointnet2_cls_ssg
+python test_classification.py --log_dir pointnet2_cls_ssg
+
+## e.g., pointnet2_ssg with normal features
+python train_classification.py --model pointnet2_cls_ssg --use_normals --log_dir pointnet2_cls_ssg_normal
+python test_classification.py --use_normals --log_dir pointnet2_cls_ssg_normal
+
+## e.g., pointnet2_ssg with uniform sampling
+python train_classification.py --model pointnet2_cls_ssg --use_uniform_sample --log_dir pointnet2_cls_ssg_fps
+python test_classification.py --use_uniform_sample --log_dir pointnet2_cls_ssg_fps
+
+# ModelNet10
+## Similar setting like ModelNet40, just using --num_category 10
+
+## e.g., pointnet2_ssg without normal features
+python train_classification.py --model pointnet2_cls_ssg --log_dir pointnet2_cls_ssg --num_category 10
+python test_classification.py --log_dir pointnet2_cls_ssg --num_category 10
+```
+
+### Performance
+| Model | Accuracy |
+|--|--|
+| PointNet (Official) | 89.2|
+| PointNet2 (Official) | 91.9 |
+| PointNet (Pytorch without normal) | 90.6|
+| PointNet (Pytorch with normal) | 91.4|
+| PointNet2_SSG (Pytorch without normal) | 92.2|
+| PointNet2_SSG (Pytorch with normal) | 92.4|
+| PointNet2_MSG (Pytorch with normal) | **92.8**|
+
+## Part Segmentation (ShapeNet)
+### Data Preparation
+Download alignment **ShapeNet** [here](https://shapenet.cs.stanford.edu/media/shapenetcore_partanno_segmentation_benchmark_v0_normal.zip) and save in `data/shapenetcore_partanno_segmentation_benchmark_v0_normal/`.
+### Run
+```
+## Check model in ./models
+## e.g., pointnet2_msg
+python train_partseg.py --model pointnet2_part_seg_msg --normal --log_dir pointnet2_part_seg_msg
+python test_partseg.py --normal --log_dir pointnet2_part_seg_msg
+```
+### Performance
+| Model | Inctance avg IoU| Class avg IoU
+|--|--|--|
+|PointNet (Official) |83.7|80.4
+|PointNet2 (Official)|85.1 |81.9
+|PointNet (Pytorch)| 84.3 |81.1|
+|PointNet2_SSG (Pytorch)| 84.9| 81.8
+|PointNet2_MSG (Pytorch)| **85.4**| **82.5**
+
+
+## Semantic Segmentation (S3DIS)
+### Data Preparation
+Download 3D indoor parsing dataset (**S3DIS**) [here](http://buildingparser.stanford.edu/dataset.html) and save in `data/s3dis/Stanford3dDataset_v1.2_Aligned_Version/`.
+```
+cd data_utils
+python collect_indoor3d_data.py
+```
+Processed data will save in `data/stanford_indoor3d/`.
+### Run
+```
+## Check model in ./models
+## e.g., pointnet2_ssg
+python train_semseg.py --model pointnet2_sem_seg --test_area 5 --log_dir pointnet2_sem_seg
+python test_semseg.py --log_dir pointnet2_sem_seg --test_area 5 --visual
+```
+Visualization results will save in `log/sem_seg/pointnet2_sem_seg/visual/` and you can visualize these .obj file by [MeshLab](http://www.meshlab.net/).
+
+### Performance
+|Model | Overall Acc |Class avg IoU | Checkpoint
+|--|--|--|--|
+| PointNet (Pytorch) | 78.9 | 43.7| [40.7MB](log/sem_seg/pointnet_sem_seg) |
+| PointNet2_ssg (Pytorch) | **83.0** | **53.5**| [11.2MB](log/sem_seg/pointnet2_sem_seg) |
+
+## Visualization
+### Using show3d_balls.py
+```
+## build C++ code for visualization
+cd visualizer
+bash build.sh
+## run one example
+python show3d_balls.py
+```
+
+### Using MeshLab
+
+
+
+## Reference By
+[halimacc/pointnet3](https://github.com/halimacc/pointnet3)
+[fxia22/pointnet.pytorch](https://github.com/fxia22/pointnet.pytorch)
+[charlesq34/PointNet](https://github.com/charlesq34/pointnet)
+[charlesq34/PointNet++](https://github.com/charlesq34/pointnet2)
+
+
+## Citation
+If you find this repo useful in your research, please consider citing it and our other works:
+```
+@article{Pytorch_Pointnet_Pointnet2,
+ Author = {Xu Yan},
+ Title = {Pointnet/Pointnet++ Pytorch},
+ Journal = {https://github.com/yanx27/Pointnet_Pointnet2_pytorch},
+ Year = {2019}
+}
+```
+```
+@InProceedings{yan2020pointasnl,
+ title={PointASNL: Robust Point Clouds Processing using Nonlocal Neural Networks with Adaptive Sampling},
+ author={Yan, Xu and Zheng, Chaoda and Li, Zhen and Wang, Sheng and Cui, Shuguang},
+ journal={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition},
+ year={2020}
+}
+```
+```
+@InProceedings{yan2021sparse,
+ title={Sparse Single Sweep LiDAR Point Cloud Segmentation via Learning Contextual Shape Priors from Scene Completion},
+ author={Yan, Xu and Gao, Jiantao and Li, Jie and Zhang, Ruimao, and Li, Zhen and Huang, Rui and Cui, Shuguang},
+ journal={AAAI Conference on Artificial Intelligence ({AAAI})},
+ year={2021}
+}
+```
+```
+@InProceedings{yan20222dpass,
+ title={2DPASS: 2D Priors Assisted Semantic Segmentation on LiDAR Point Clouds},
+ author={Xu Yan and Jiantao Gao and Chaoda Zheng and Chao Zheng and Ruimao Zhang and Shuguang Cui and Zhen Li},
+ year={2022},
+ journal={ECCV}
+}
+```
+## Selected Projects using This Codebase
+* [PointConv: Deep Convolutional Networks on 3D Point Clouds, CVPR'19](https://github.com/Young98CN/pointconv_pytorch)
+* [On Isometry Robustness of Deep 3D Point Cloud Models under Adversarial Attacks, CVPR'20](https://github.com/skywalker6174/3d-isometry-robust)
+* [Label-Efficient Learning on Point Clouds using Approximate Convex Decompositions, ECCV'20](https://github.com/matheusgadelha/PointCloudLearningACD)
+* [PCT: Point Cloud Transformer](https://github.com/MenghaoGuo/PCT)
+* [PSNet: Fast Data Structuring for Hierarchical Deep Learning on Point Cloud](https://github.com/lly007/PointStructuringNet)
+* [Stratified Transformer for 3D Point Cloud Segmentation, CVPR'22](https://github.com/dvlab-research/stratified-transformer)
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/ModelNetDataLoader.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/ModelNetDataLoader.py
new file mode 100644
index 0000000000..b9cd9f5636
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/ModelNetDataLoader.py
@@ -0,0 +1,146 @@
+'''
+@author: Xu Yan
+@file: ModelNet.py
+@time: 2021/3/19 15:51
+'''
+import os
+import numpy as np
+import warnings
+import pickle
+
+from tqdm import tqdm
+from torch.utils.data import Dataset
+
+warnings.filterwarnings('ignore')
+
+
+def pc_normalize(pc):
+ centroid = np.mean(pc, axis=0)
+ pc = pc - centroid
+ m = np.max(np.sqrt(np.sum(pc**2, axis=1)))
+ pc = pc / m
+ return pc
+
+
+def farthest_point_sample(point, npoint):
+ """
+ Input:
+ xyz: pointcloud data, [N, D]
+ npoint: number of samples
+ Return:
+ centroids: sampled pointcloud index, [npoint, D]
+ """
+ N, D = point.shape
+ xyz = point[:,:3]
+ centroids = np.zeros((npoint,))
+ distance = np.ones((N,)) * 1e10
+ farthest = np.random.randint(0, N)
+ for i in range(npoint):
+ centroids[i] = farthest
+ centroid = xyz[farthest, :]
+ dist = np.sum((xyz - centroid) ** 2, -1)
+ mask = dist < distance
+ distance[mask] = dist[mask]
+ farthest = np.argmax(distance, -1)
+ point = point[centroids.astype(np.int32)]
+ return point
+
+
+class ModelNetDataLoader(Dataset):
+ def __init__(self, root, args, split='train', process_data=False):
+ self.root = root
+ self.npoints = args.num_point
+ self.process_data = process_data
+ self.uniform = args.use_uniform_sample
+ self.use_normals = args.use_normals
+ self.num_category = args.num_category
+
+ if self.num_category == 10:
+ self.catfile = os.path.join(self.root, 'modelnet10_shape_names.txt')
+ else:
+ self.catfile = os.path.join(self.root, 'modelnet40_shape_names.txt')
+
+ self.cat = [line.rstrip() for line in open(self.catfile)]
+ self.classes = dict(zip(self.cat, range(len(self.cat))))
+
+ shape_ids = {}
+ if self.num_category == 10:
+ shape_ids['train'] = [line.rstrip() for line in open(os.path.join(self.root, 'modelnet10_train.txt'))]
+ shape_ids['test'] = [line.rstrip() for line in open(os.path.join(self.root, 'modelnet10_test.txt'))]
+ else:
+ shape_ids['train'] = [line.rstrip() for line in open(os.path.join(self.root, 'modelnet40_train.txt'))]
+ shape_ids['test'] = [line.rstrip() for line in open(os.path.join(self.root, 'modelnet40_test.txt'))]
+
+ assert (split == 'train' or split == 'test')
+ shape_names = ['_'.join(x.split('_')[0:-1]) for x in shape_ids[split]]
+ self.datapath = [(shape_names[i], os.path.join(self.root, shape_names[i], shape_ids[split][i]) + '.txt') for i
+ in range(len(shape_ids[split]))]
+ print('The size of %s data is %d' % (split, len(self.datapath)))
+
+ if self.uniform:
+ self.save_path = os.path.join(root, 'modelnet%d_%s_%dpts_fps.dat' % (self.num_category, split, self.npoints))
+ else:
+ self.save_path = os.path.join(root, 'modelnet%d_%s_%dpts.dat' % (self.num_category, split, self.npoints))
+
+ if self.process_data:
+ if not os.path.exists(self.save_path):
+ print('Processing data %s (only running in the first time)...' % self.save_path)
+ self.list_of_points = [None] * len(self.datapath)
+ self.list_of_labels = [None] * len(self.datapath)
+
+ for index in tqdm(range(len(self.datapath)), total=len(self.datapath)):
+ fn = self.datapath[index]
+ cls = self.classes[self.datapath[index][0]]
+ cls = np.array([cls]).astype(np.int32)
+ point_set = np.loadtxt(fn[1], delimiter=',').astype(np.float32)
+
+ if self.uniform:
+ point_set = farthest_point_sample(point_set, self.npoints)
+ else:
+ point_set = point_set[0:self.npoints, :]
+
+ self.list_of_points[index] = point_set
+ self.list_of_labels[index] = cls
+
+ with open(self.save_path, 'wb') as f:
+ pickle.dump([self.list_of_points, self.list_of_labels], f)
+ else:
+ print('Load processed data from %s...' % self.save_path)
+ with open(self.save_path, 'rb') as f:
+ self.list_of_points, self.list_of_labels = pickle.load(f)
+
+ def __len__(self):
+ return len(self.datapath)
+
+ def _get_item(self, index):
+ if self.process_data:
+ point_set, label = self.list_of_points[index], self.list_of_labels[index]
+ else:
+ fn = self.datapath[index]
+ cls = self.classes[self.datapath[index][0]]
+ label = np.array([cls]).astype(np.int32)
+ point_set = np.loadtxt(fn[1], delimiter=',').astype(np.float32)
+
+ if self.uniform:
+ point_set = farthest_point_sample(point_set, self.npoints)
+ else:
+ point_set = point_set[0:self.npoints, :]
+
+ point_set[:, 0:3] = pc_normalize(point_set[:, 0:3])
+ if not self.use_normals:
+ point_set = point_set[:, 0:3]
+
+ return point_set, label[0]
+
+ def __getitem__(self, index):
+ return self._get_item(index)
+
+
+if __name__ == '__main__':
+ import torch
+
+ data = ModelNetDataLoader('/data/modelnet40_normal_resampled/', split='train')
+ DataLoader = torch.utils.data.DataLoader(data, batch_size=12, shuffle=True)
+ for point, label in DataLoader:
+ print(point.shape)
+ print(label.shape)
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/S3DISDataLoader.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/S3DISDataLoader.py
new file mode 100644
index 0000000000..d4aae42e6c
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/S3DISDataLoader.py
@@ -0,0 +1,194 @@
+import os
+import numpy as np
+
+from tqdm import tqdm
+from torch.utils.data import Dataset
+
+
+class S3DISDataset(Dataset):
+ def __init__(self, split='train', data_root='trainval_fullarea', num_point=4096, test_area=5, block_size=1.0, sample_rate=1.0, transform=None):
+ super().__init__()
+ self.num_point = num_point
+ self.block_size = block_size
+ self.transform = transform
+ rooms = sorted(os.listdir(data_root))
+ rooms = [room for room in rooms if 'Area_' in room]
+ if split == 'train':
+ rooms_split = [room for room in rooms if not 'Area_{}'.format(test_area) in room]
+ else:
+ rooms_split = [room for room in rooms if 'Area_{}'.format(test_area) in room]
+
+ self.room_points, self.room_labels = [], []
+ self.room_coord_min, self.room_coord_max = [], []
+ num_point_all = []
+ labelweights = np.zeros(13)
+
+ for room_name in tqdm(rooms_split, total=len(rooms_split)):
+ room_path = os.path.join(data_root, room_name)
+ room_data = np.load(room_path) # xyzrgbl, N*7
+ points, labels = room_data[:, 0:6], room_data[:, 6] # xyzrgb, N*6; l, N
+ tmp, _ = np.histogram(labels, range(14))
+ labelweights += tmp
+ coord_min, coord_max = np.amin(points, axis=0)[:3], np.amax(points, axis=0)[:3]
+ self.room_points.append(points), self.room_labels.append(labels)
+ self.room_coord_min.append(coord_min), self.room_coord_max.append(coord_max)
+ num_point_all.append(labels.size)
+ labelweights = labelweights.astype(np.float32)
+ labelweights = labelweights / np.sum(labelweights)
+ self.labelweights = np.power(np.amax(labelweights) / labelweights, 1 / 3.0)
+ print(self.labelweights)
+ sample_prob = num_point_all / np.sum(num_point_all)
+ num_iter = int(np.sum(num_point_all) * sample_rate / num_point)
+ room_idxs = []
+ for index in range(len(rooms_split)):
+ room_idxs.extend([index] * int(round(sample_prob[index] * num_iter)))
+ self.room_idxs = np.array(room_idxs)
+ print("Totally {} samples in {} set.".format(len(self.room_idxs), split))
+
+ def __getitem__(self, idx):
+ room_idx = self.room_idxs[idx]
+ points = self.room_points[room_idx] # N * 6
+ labels = self.room_labels[room_idx] # N
+ N_points = points.shape[0]
+
+ while (True):
+ center = points[np.random.choice(N_points)][:3]
+ block_min = center - [self.block_size / 2.0, self.block_size / 2.0, 0]
+ block_max = center + [self.block_size / 2.0, self.block_size / 2.0, 0]
+ point_idxs = np.where((points[:, 0] >= block_min[0]) & (points[:, 0] <= block_max[0]) & (points[:, 1] >= block_min[1]) & (points[:, 1] <= block_max[1]))[0]
+ if point_idxs.size > 1024:
+ break
+
+ if point_idxs.size >= self.num_point:
+ selected_point_idxs = np.random.choice(point_idxs, self.num_point, replace=False)
+ else:
+ selected_point_idxs = np.random.choice(point_idxs, self.num_point, replace=True)
+
+ # normalize
+ selected_points = points[selected_point_idxs, :] # num_point * 6
+ current_points = np.zeros((self.num_point, 9)) # num_point * 9
+ current_points[:, 6] = selected_points[:, 0] / self.room_coord_max[room_idx][0]
+ current_points[:, 7] = selected_points[:, 1] / self.room_coord_max[room_idx][1]
+ current_points[:, 8] = selected_points[:, 2] / self.room_coord_max[room_idx][2]
+ selected_points[:, 0] = selected_points[:, 0] - center[0]
+ selected_points[:, 1] = selected_points[:, 1] - center[1]
+ selected_points[:, 3:6] /= 255.0
+ current_points[:, 0:6] = selected_points
+ current_labels = labels[selected_point_idxs]
+ if self.transform is not None:
+ current_points, current_labels = self.transform(current_points, current_labels)
+ return current_points, current_labels
+
+ def __len__(self):
+ return len(self.room_idxs)
+
+class ScannetDatasetWholeScene():
+ # prepare to give prediction on each points
+ def __init__(self, root, block_points=4096, split='test', test_area=5, stride=0.5, block_size=1.0, padding=0.001):
+ self.block_points = block_points
+ self.block_size = block_size
+ self.padding = padding
+ self.root = root
+ self.split = split
+ self.stride = stride
+ self.scene_points_num = []
+ assert split in ['train', 'test']
+ if self.split == 'train':
+ self.file_list = [d for d in os.listdir(root) if d.find('Area_%d' % test_area) is -1]
+ else:
+ self.file_list = [d for d in os.listdir(root) if d.find('Area_%d' % test_area) is not -1]
+ self.scene_points_list = []
+ self.semantic_labels_list = []
+ self.room_coord_min, self.room_coord_max = [], []
+ for file in self.file_list:
+ data = np.load(root + file)
+ points = data[:, :3]
+ self.scene_points_list.append(data[:, :6])
+ self.semantic_labels_list.append(data[:, 6])
+ coord_min, coord_max = np.amin(points, axis=0)[:3], np.amax(points, axis=0)[:3]
+ self.room_coord_min.append(coord_min), self.room_coord_max.append(coord_max)
+ assert len(self.scene_points_list) == len(self.semantic_labels_list)
+
+ labelweights = np.zeros(13)
+ for seg in self.semantic_labels_list:
+ tmp, _ = np.histogram(seg, range(14))
+ self.scene_points_num.append(seg.shape[0])
+ labelweights += tmp
+ labelweights = labelweights.astype(np.float32)
+ labelweights = labelweights / np.sum(labelweights)
+ self.labelweights = np.power(np.amax(labelweights) / labelweights, 1 / 3.0)
+
+ def __getitem__(self, index):
+ point_set_ini = self.scene_points_list[index]
+ points = point_set_ini[:,:6]
+ labels = self.semantic_labels_list[index]
+ coord_min, coord_max = np.amin(points, axis=0)[:3], np.amax(points, axis=0)[:3]
+ grid_x = int(np.ceil(float(coord_max[0] - coord_min[0] - self.block_size) / self.stride) + 1)
+ grid_y = int(np.ceil(float(coord_max[1] - coord_min[1] - self.block_size) / self.stride) + 1)
+ data_room, label_room, sample_weight, index_room = np.array([]), np.array([]), np.array([]), np.array([])
+ for index_y in range(0, grid_y):
+ for index_x in range(0, grid_x):
+ s_x = coord_min[0] + index_x * self.stride
+ e_x = min(s_x + self.block_size, coord_max[0])
+ s_x = e_x - self.block_size
+ s_y = coord_min[1] + index_y * self.stride
+ e_y = min(s_y + self.block_size, coord_max[1])
+ s_y = e_y - self.block_size
+ point_idxs = np.where(
+ (points[:, 0] >= s_x - self.padding) & (points[:, 0] <= e_x + self.padding) & (points[:, 1] >= s_y - self.padding) & (
+ points[:, 1] <= e_y + self.padding))[0]
+ if point_idxs.size == 0:
+ continue
+ num_batch = int(np.ceil(point_idxs.size / self.block_points))
+ point_size = int(num_batch * self.block_points)
+ replace = False if (point_size - point_idxs.size <= point_idxs.size) else True
+ point_idxs_repeat = np.random.choice(point_idxs, point_size - point_idxs.size, replace=replace)
+ point_idxs = np.concatenate((point_idxs, point_idxs_repeat))
+ np.random.shuffle(point_idxs)
+ data_batch = points[point_idxs, :]
+ normlized_xyz = np.zeros((point_size, 3))
+ normlized_xyz[:, 0] = data_batch[:, 0] / coord_max[0]
+ normlized_xyz[:, 1] = data_batch[:, 1] / coord_max[1]
+ normlized_xyz[:, 2] = data_batch[:, 2] / coord_max[2]
+ data_batch[:, 0] = data_batch[:, 0] - (s_x + self.block_size / 2.0)
+ data_batch[:, 1] = data_batch[:, 1] - (s_y + self.block_size / 2.0)
+ data_batch[:, 3:6] /= 255.0
+ data_batch = np.concatenate((data_batch, normlized_xyz), axis=1)
+ label_batch = labels[point_idxs].astype(int)
+ batch_weight = self.labelweights[label_batch]
+
+ data_room = np.vstack([data_room, data_batch]) if data_room.size else data_batch
+ label_room = np.hstack([label_room, label_batch]) if label_room.size else label_batch
+ sample_weight = np.hstack([sample_weight, batch_weight]) if label_room.size else batch_weight
+ index_room = np.hstack([index_room, point_idxs]) if index_room.size else point_idxs
+ data_room = data_room.reshape((-1, self.block_points, data_room.shape[1]))
+ label_room = label_room.reshape((-1, self.block_points))
+ sample_weight = sample_weight.reshape((-1, self.block_points))
+ index_room = index_room.reshape((-1, self.block_points))
+ return data_room, label_room, sample_weight, index_room
+
+ def __len__(self):
+ return len(self.scene_points_list)
+
+if __name__ == '__main__':
+ data_root = '/data/yxu/PointNonLocal/data/stanford_indoor3d/'
+ num_point, test_area, block_size, sample_rate = 4096, 5, 1.0, 0.01
+
+ point_data = S3DISDataset(split='train', data_root=data_root, num_point=num_point, test_area=test_area, block_size=block_size, sample_rate=sample_rate, transform=None)
+ print('point data size:', point_data.__len__())
+ print('point data 0 shape:', point_data.__getitem__(0)[0].shape)
+ print('point label 0 shape:', point_data.__getitem__(0)[1].shape)
+ import torch, time, random
+ manual_seed = 123
+ random.seed(manual_seed)
+ np.random.seed(manual_seed)
+ torch.manual_seed(manual_seed)
+ torch.cuda.manual_seed_all(manual_seed)
+ def worker_init_fn(worker_id):
+ random.seed(manual_seed + worker_id)
+ train_loader = torch.utils.data.DataLoader(point_data, batch_size=16, shuffle=True, num_workers=16, pin_memory=True, worker_init_fn=worker_init_fn)
+ for idx in range(4):
+ end = time.time()
+ for i, (input, target) in enumerate(train_loader):
+ print('time: {}/{}--{}'.format(i+1, len(train_loader), time.time() - end))
+ end = time.time()
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/ShapeNetDataLoader.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/ShapeNetDataLoader.py
new file mode 100644
index 0000000000..171a904533
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/ShapeNetDataLoader.py
@@ -0,0 +1,118 @@
+# *_*coding:utf-8 *_*
+import os
+import json
+import warnings
+import numpy as np
+from torch.utils.data import Dataset
+warnings.filterwarnings('ignore')
+
+def pc_normalize(pc):
+ centroid = np.mean(pc, axis=0)
+ pc = pc - centroid
+ m = np.max(np.sqrt(np.sum(pc ** 2, axis=1)))
+ pc = pc / m
+ return pc
+
+class PartNormalDataset(Dataset):
+ def __init__(self,root = './data/shapenetcore_partanno_segmentation_benchmark_v0_normal', npoints=2500, split='train', class_choice=None, normal_channel=False):
+ self.npoints = npoints
+ self.root = root
+ self.catfile = os.path.join(self.root, 'synsetoffset2category.txt')
+ self.cat = {}
+ self.normal_channel = normal_channel
+
+
+ with open(self.catfile, 'r') as f:
+ for line in f:
+ ls = line.strip().split()
+ self.cat[ls[0]] = ls[1]
+ self.cat = {k: v for k, v in self.cat.items()}
+ self.classes_original = dict(zip(self.cat, range(len(self.cat))))
+
+ if not class_choice is None:
+ self.cat = {k:v for k,v in self.cat.items() if k in class_choice}
+ # print(self.cat)
+
+ self.meta = {}
+ with open(os.path.join(self.root, 'train_test_split', 'shuffled_train_file_list.json'), 'r') as f:
+ train_ids = set([str(d.split('/')[2]) for d in json.load(f)])
+ with open(os.path.join(self.root, 'train_test_split', 'shuffled_val_file_list.json'), 'r') as f:
+ val_ids = set([str(d.split('/')[2]) for d in json.load(f)])
+ with open(os.path.join(self.root, 'train_test_split', 'shuffled_test_file_list.json'), 'r') as f:
+ test_ids = set([str(d.split('/')[2]) for d in json.load(f)])
+ for item in self.cat:
+ # print('category', item)
+ self.meta[item] = []
+ dir_point = os.path.join(self.root, self.cat[item])
+ fns = sorted(os.listdir(dir_point))
+ # print(fns[0][0:-4])
+ if split == 'trainval':
+ fns = [fn for fn in fns if ((fn[0:-4] in train_ids) or (fn[0:-4] in val_ids))]
+ elif split == 'train':
+ fns = [fn for fn in fns if fn[0:-4] in train_ids]
+ elif split == 'val':
+ fns = [fn for fn in fns if fn[0:-4] in val_ids]
+ elif split == 'test':
+ fns = [fn for fn in fns if fn[0:-4] in test_ids]
+ else:
+ print('Unknown split: %s. Exiting..' % (split))
+ exit(-1)
+
+ # print(os.path.basename(fns))
+ for fn in fns:
+ token = (os.path.splitext(os.path.basename(fn))[0])
+ self.meta[item].append(os.path.join(dir_point, token + '.txt'))
+
+ self.datapath = []
+ for item in self.cat:
+ for fn in self.meta[item]:
+ self.datapath.append((item, fn))
+
+ self.classes = {}
+ for i in self.cat.keys():
+ self.classes[i] = self.classes_original[i]
+
+ # Mapping from category ('Chair') to a list of int [10,11,12,13] as segmentation labels
+ self.seg_classes = {'Earphone': [16, 17, 18], 'Motorbike': [30, 31, 32, 33, 34, 35], 'Rocket': [41, 42, 43],
+ 'Car': [8, 9, 10, 11], 'Laptop': [28, 29], 'Cap': [6, 7], 'Skateboard': [44, 45, 46],
+ 'Mug': [36, 37], 'Guitar': [19, 20, 21], 'Bag': [4, 5], 'Lamp': [24, 25, 26, 27],
+ 'Table': [47, 48, 49], 'Airplane': [0, 1, 2, 3], 'Pistol': [38, 39, 40],
+ 'Chair': [12, 13, 14, 15], 'Knife': [22, 23]}
+
+ # for cat in sorted(self.seg_classes.keys()):
+ # print(cat, self.seg_classes[cat])
+
+ self.cache = {} # from index to (point_set, cls, seg) tuple
+ self.cache_size = 20000
+
+
+ def __getitem__(self, index):
+ if index in self.cache:
+ point_set, cls, seg = self.cache[index]
+ else:
+ fn = self.datapath[index]
+ cat = self.datapath[index][0]
+ cls = self.classes[cat]
+ cls = np.array([cls]).astype(np.int32)
+ data = np.loadtxt(fn[1]).astype(np.float32)
+ if not self.normal_channel:
+ point_set = data[:, 0:3]
+ else:
+ point_set = data[:, 0:6]
+ seg = data[:, -1].astype(np.int32)
+ if len(self.cache) < self.cache_size:
+ self.cache[index] = (point_set, cls, seg)
+ point_set[:, 0:3] = pc_normalize(point_set[:, 0:3])
+
+ choice = np.random.choice(len(seg), self.npoints, replace=True)
+ # resample
+ point_set = point_set[choice, :]
+ seg = seg[choice]
+
+ return point_set, cls, seg
+
+ def __len__(self):
+ return len(self.datapath)
+
+
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/collect_indoor3d_data.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/collect_indoor3d_data.py
new file mode 100644
index 0000000000..81ecdbee68
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/collect_indoor3d_data.py
@@ -0,0 +1,24 @@
+import os
+import sys
+from indoor3d_util import DATA_PATH, collect_point_label
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+ROOT_DIR = os.path.dirname(BASE_DIR)
+sys.path.append(BASE_DIR)
+
+anno_paths = [line.rstrip() for line in open(os.path.join(BASE_DIR, 'meta/anno_paths.txt'))]
+anno_paths = [os.path.join(DATA_PATH, p) for p in anno_paths]
+
+output_folder = os.path.join(ROOT_DIR, 'data/stanford_indoor3d')
+if not os.path.exists(output_folder):
+ os.mkdir(output_folder)
+
+# Note: there is an extra character in the v1.2 data in Area_5/hallway_6. It's fixed manually.
+for anno_path in anno_paths:
+ print(anno_path)
+ try:
+ elements = anno_path.split('/')
+ out_filename = elements[-3]+'_'+elements[-2]+'.npy' # Area_1_hallway_1.npy
+ collect_point_label(anno_path, os.path.join(output_folder, out_filename), 'numpy')
+ except:
+ print(anno_path, 'ERROR!!')
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/indoor3d_util.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/indoor3d_util.py
new file mode 100644
index 0000000000..4993672c13
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/indoor3d_util.py
@@ -0,0 +1,598 @@
+import numpy as np
+import glob
+import os
+import sys
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+ROOT_DIR = os.path.dirname(BASE_DIR)
+sys.path.append(BASE_DIR)
+
+DATA_PATH = os.path.join(ROOT_DIR, 'data','s3dis', 'Stanford3dDataset_v1.2_Aligned_Version')
+g_classes = [x.rstrip() for x in open(os.path.join(BASE_DIR, 'meta/class_names.txt'))]
+g_class2label = {cls: i for i,cls in enumerate(g_classes)}
+g_class2color = {'ceiling': [0,255,0],
+ 'floor': [0,0,255],
+ 'wall': [0,255,255],
+ 'beam': [255,255,0],
+ 'column': [255,0,255],
+ 'window': [100,100,255],
+ 'door': [200,200,100],
+ 'table': [170,120,200],
+ 'chair': [255,0,0],
+ 'sofa': [200,100,100],
+ 'bookcase': [10,200,100],
+ 'board': [200,200,200],
+ 'clutter': [50,50,50]}
+g_easy_view_labels = [7,8,9,10,11,1]
+g_label2color = {g_classes.index(cls): g_class2color[cls] for cls in g_classes}
+
+
+# -----------------------------------------------------------------------------
+# CONVERT ORIGINAL DATA TO OUR DATA_LABEL FILES
+# -----------------------------------------------------------------------------
+
+def collect_point_label(anno_path, out_filename, file_format='txt'):
+ """ Convert original dataset files to data_label file (each line is XYZRGBL).
+ We aggregated all the points from each instance in the room.
+
+ Args:
+ anno_path: path to annotations. e.g. Area_1/office_2/Annotations/
+ out_filename: path to save collected points and labels (each line is XYZRGBL)
+ file_format: txt or numpy, determines what file format to save.
+ Returns:
+ None
+ Note:
+ the points are shifted before save, the most negative point is now at origin.
+ """
+ points_list = []
+ for f in glob.glob(os.path.join(anno_path, '*.txt')):
+ cls = os.path.basename(f).split('_')[0]
+ print(f)
+ if cls not in g_classes: # note: in some room there is 'staris' class..
+ cls = 'clutter'
+
+ points = np.loadtxt(f)
+ labels = np.ones((points.shape[0],1)) * g_class2label[cls]
+ points_list.append(np.concatenate([points, labels], 1)) # Nx7
+
+ data_label = np.concatenate(points_list, 0)
+ xyz_min = np.amin(data_label, axis=0)[0:3]
+ data_label[:, 0:3] -= xyz_min
+
+ if file_format=='txt':
+ fout = open(out_filename, 'w')
+ for i in range(data_label.shape[0]):
+ fout.write('%f %f %f %d %d %d %d\n' % \
+ (data_label[i,0], data_label[i,1], data_label[i,2],
+ data_label[i,3], data_label[i,4], data_label[i,5],
+ data_label[i,6]))
+ fout.close()
+ elif file_format=='numpy':
+ np.save(out_filename, data_label)
+ else:
+ print('ERROR!! Unknown file format: %s, please use txt or numpy.' % \
+ (file_format))
+ exit()
+
+def data_to_obj(data,name='example.obj',no_wall=True):
+ fout = open(name, 'w')
+ label = data[:, -1].astype(int)
+ for i in range(data.shape[0]):
+ if no_wall and ((label[i] == 2) or (label[i]==0)):
+ continue
+ fout.write('v %f %f %f %d %d %d\n' % \
+ (data[i, 0], data[i, 1], data[i, 2], data[i, 3], data[i, 4], data[i, 5]))
+ fout.close()
+
+def point_label_to_obj(input_filename, out_filename, label_color=True, easy_view=False, no_wall=False):
+ """ For visualization of a room from data_label file,
+ input_filename: each line is X Y Z R G B L
+ out_filename: OBJ filename,
+ visualize input file by coloring point with label color
+ easy_view: only visualize furnitures and floor
+ """
+ data_label = np.loadtxt(input_filename)
+ data = data_label[:, 0:6]
+ label = data_label[:, -1].astype(int)
+ fout = open(out_filename, 'w')
+ for i in range(data.shape[0]):
+ color = g_label2color[label[i]]
+ if easy_view and (label[i] not in g_easy_view_labels):
+ continue
+ if no_wall and ((label[i] == 2) or (label[i]==0)):
+ continue
+ if label_color:
+ fout.write('v %f %f %f %d %d %d\n' % \
+ (data[i,0], data[i,1], data[i,2], color[0], color[1], color[2]))
+ else:
+ fout.write('v %f %f %f %d %d %d\n' % \
+ (data[i,0], data[i,1], data[i,2], data[i,3], data[i,4], data[i,5]))
+ fout.close()
+
+
+
+# -----------------------------------------------------------------------------
+# PREPARE BLOCK DATA FOR DEEPNETS TRAINING/TESTING
+# -----------------------------------------------------------------------------
+
+def sample_data(data, num_sample):
+ """ data is in N x ...
+ we want to keep num_samplexC of them.
+ if N > num_sample, we will randomly keep num_sample of them.
+ if N < num_sample, we will randomly duplicate samples.
+ """
+ N = data.shape[0]
+ if (N == num_sample):
+ return data, range(N)
+ elif (N > num_sample):
+ sample = np.random.choice(N, num_sample)
+ return data[sample, ...], sample
+ else:
+ sample = np.random.choice(N, num_sample-N)
+ dup_data = data[sample, ...]
+ return np.concatenate([data, dup_data], 0), list(range(N))+list(sample)
+
+def sample_data_label(data, label, num_sample):
+ new_data, sample_indices = sample_data(data, num_sample)
+ new_label = label[sample_indices]
+ return new_data, new_label
+
+def room2blocks(data, label, num_point, block_size=1.0, stride=1.0,
+ random_sample=False, sample_num=None, sample_aug=1):
+ """ Prepare block training data.
+ Args:
+ data: N x 6 numpy array, 012 are XYZ in meters, 345 are RGB in [0,1]
+ assumes the data is shifted (min point is origin) and aligned
+ (aligned with XYZ axis)
+ label: N size uint8 numpy array from 0-12
+ num_point: int, how many points to sample in each block
+ block_size: float, physical size of the block in meters
+ stride: float, stride for block sweeping
+ random_sample: bool, if True, we will randomly sample blocks in the room
+ sample_num: int, if random sample, how many blocks to sample
+ [default: room area]
+ sample_aug: if random sample, how much aug
+ Returns:
+ block_datas: K x num_point x 6 np array of XYZRGB, RGB is in [0,1]
+ block_labels: K x num_point x 1 np array of uint8 labels
+
+ TODO: for this version, blocking is in fixed, non-overlapping pattern.
+ """
+ assert(stride<=block_size)
+
+ limit = np.amax(data, 0)[0:3]
+
+ # Get the corner location for our sampling blocks
+ xbeg_list = []
+ ybeg_list = []
+ if not random_sample:
+ num_block_x = int(np.ceil((limit[0] - block_size) / stride)) + 1
+ num_block_y = int(np.ceil(collect_point_label(limit[1] - block_size) / stride)) + 1
+ for i in range(num_block_x):
+ for j in range(num_block_y):
+ xbeg_list.append(i*stride)
+ ybeg_list.append(j*stride)
+ else:
+ num_block_x = int(np.ceil(limit[0] / block_size))
+ num_block_y = int(np.ceil(limit[1] / block_size))
+ if sample_num is None:
+ sample_num = num_block_x * num_block_y * sample_aug
+ for _ in range(sample_num):
+ xbeg = np.random.uniform(-block_size, limit[0])
+ ybeg = np.random.uniform(-block_size, limit[1])
+ xbeg_list.append(xbeg)
+ ybeg_list.append(ybeg)
+
+ # Collect blocks
+ block_data_list = []
+ block_label_list = []
+ idx = 0
+ for idx in range(len(xbeg_list)):
+ xbeg = xbeg_list[idx]
+ ybeg = ybeg_list[idx]
+ xcond = (data[:,0]<=xbeg+block_size) & (data[:,0]>=xbeg)
+ ycond = (data[:,1]<=ybeg+block_size) & (data[:,1]>=ybeg)
+ cond = xcond & ycond
+ if np.sum(cond) < 100: # discard block if there are less than 100 pts.
+ continue
+
+ block_data = data[cond, :]
+ block_label = label[cond]
+
+ # randomly subsample data
+ block_data_sampled, block_label_sampled = \
+ sample_data_label(block_data, block_label, num_point)
+ block_data_list.append(np.expand_dims(block_data_sampled, 0))
+ block_label_list.append(np.expand_dims(block_label_sampled, 0))
+
+ return np.concatenate(block_data_list, 0), \
+ np.concatenate(block_label_list, 0)
+
+
+def room2blocks_plus(data_label, num_point, block_size, stride,
+ random_sample, sample_num, sample_aug):
+ """ room2block with input filename and RGB preprocessing.
+ """
+ data = data_label[:,0:6]
+ data[:,3:6] /= 255.0
+ label = data_label[:,-1].astype(np.uint8)
+
+ return room2blocks(data, label, num_point, block_size, stride,
+ random_sample, sample_num, sample_aug)
+
+def room2blocks_wrapper(data_label_filename, num_point, block_size=1.0, stride=1.0,
+ random_sample=False, sample_num=None, sample_aug=1):
+ if data_label_filename[-3:] == 'txt':
+ data_label = np.loadtxt(data_label_filename)
+ elif data_label_filename[-3:] == 'npy':
+ data_label = np.load(data_label_filename)
+ else:
+ print('Unknown file type! exiting.')
+ exit()
+ return room2blocks_plus(data_label, num_point, block_size, stride,
+ random_sample, sample_num, sample_aug)
+
+def room2blocks_plus_normalized(data_label, num_point, block_size, stride,
+ random_sample, sample_num, sample_aug):
+ """ room2block, with input filename and RGB preprocessing.
+ for each block centralize XYZ, add normalized XYZ as 678 channels
+ """
+ data = data_label[:,0:6]
+ data[:,3:6] /= 255.0
+ label = data_label[:,-1].astype(np.uint8)
+ max_room_x = max(data[:,0])
+ max_room_y = max(data[:,1])
+ max_room_z = max(data[:,2])
+
+ data_batch, label_batch = room2blocks(data, label, num_point, block_size, stride,
+ random_sample, sample_num, sample_aug)
+ new_data_batch = np.zeros((data_batch.shape[0], num_point, 9))
+ for b in range(data_batch.shape[0]):
+ new_data_batch[b, :, 6] = data_batch[b, :, 0]/max_room_x
+ new_data_batch[b, :, 7] = data_batch[b, :, 1]/max_room_y
+ new_data_batch[b, :, 8] = data_batch[b, :, 2]/max_room_z
+ minx = min(data_batch[b, :, 0])
+ miny = min(data_batch[b, :, 1])
+ data_batch[b, :, 0] -= (minx+block_size/2)
+ data_batch[b, :, 1] -= (miny+block_size/2)
+ new_data_batch[:, :, 0:6] = data_batch
+ return new_data_batch, label_batch
+
+
+def room2blocks_wrapper_normalized(data_label_filename, num_point, block_size=1.0, stride=1.0,
+ random_sample=False, sample_num=None, sample_aug=1):
+ if data_label_filename[-3:] == 'txt':
+ data_label = np.loadtxt(data_label_filename)
+ elif data_label_filename[-3:] == 'npy':
+ data_label = np.load(data_label_filename)
+ else:
+ print('Unknown file type! exiting.')
+ exit()
+ return room2blocks_plus_normalized(data_label, num_point, block_size, stride,
+ random_sample, sample_num, sample_aug)
+
+def room2samples(data, label, sample_num_point):
+ """ Prepare whole room samples.
+
+ Args:
+ data: N x 6 numpy array, 012 are XYZ in meters, 345 are RGB in [0,1]
+ assumes the data is shifted (min point is origin) and
+ aligned (aligned with XYZ axis)
+ label: N size uint8 numpy array from 0-12
+ sample_num_point: int, how many points to sample in each sample
+ Returns:
+ sample_datas: K x sample_num_point x 9
+ numpy array of XYZRGBX'Y'Z', RGB is in [0,1]
+ sample_labels: K x sample_num_point x 1 np array of uint8 labels
+ """
+ N = data.shape[0]
+ order = np.arange(N)
+ np.random.shuffle(order)
+ data = data[order, :]
+ label = label[order]
+
+ batch_num = int(np.ceil(N / float(sample_num_point)))
+ sample_datas = np.zeros((batch_num, sample_num_point, 6))
+ sample_labels = np.zeros((batch_num, sample_num_point, 1))
+
+ for i in range(batch_num):
+ beg_idx = i*sample_num_point
+ end_idx = min((i+1)*sample_num_point, N)
+ num = end_idx - beg_idx
+ sample_datas[i,0:num,:] = data[beg_idx:end_idx, :]
+ sample_labels[i,0:num,0] = label[beg_idx:end_idx]
+ if num < sample_num_point:
+ makeup_indices = np.random.choice(N, sample_num_point - num)
+ sample_datas[i,num:,:] = data[makeup_indices, :]
+ sample_labels[i,num:,0] = label[makeup_indices]
+ return sample_datas, sample_labels
+
+def room2samples_plus_normalized(data_label, num_point):
+ """ room2sample, with input filename and RGB preprocessing.
+ for each block centralize XYZ, add normalized XYZ as 678 channels
+ """
+ data = data_label[:,0:6]
+ data[:,3:6] /= 255.0
+ label = data_label[:,-1].astype(np.uint8)
+ max_room_x = max(data[:,0])
+ max_room_y = max(data[:,1])
+ max_room_z = max(data[:,2])
+ #print(max_room_x, max_room_y, max_room_z)
+
+ data_batch, label_batch = room2samples(data, label, num_point)
+ new_data_batch = np.zeros((data_batch.shape[0], num_point, 9))
+ for b in range(data_batch.shape[0]):
+ new_data_batch[b, :, 6] = data_batch[b, :, 0]/max_room_x
+ new_data_batch[b, :, 7] = data_batch[b, :, 1]/max_room_y
+ new_data_batch[b, :, 8] = data_batch[b, :, 2]/max_room_z
+ #minx = min(data_batch[b, :, 0])
+ #miny = min(data_batch[b, :, 1])
+ #data_batch[b, :, 0] -= (minx+block_size/2)
+ #data_batch[b, :, 1] -= (miny+block_size/2)
+ new_data_batch[:, :, 0:6] = data_batch
+ return new_data_batch, label_batch
+
+
+def room2samples_wrapper_normalized(data_label_filename, num_point):
+ if data_label_filename[-3:] == 'txt':
+ data_label = np.loadtxt(data_label_filename)
+ elif data_label_filename[-3:] == 'npy':
+ data_label = np.load(data_label_filename)
+ else:
+ print('Unknown file type! exiting.')
+ exit()
+ return room2samples_plus_normalized(data_label, num_point)
+
+
+# -----------------------------------------------------------------------------
+# EXTRACT INSTANCE BBOX FROM ORIGINAL DATA (for detection evaluation)
+# -----------------------------------------------------------------------------
+
+def collect_bounding_box(anno_path, out_filename):
+ """ Compute bounding boxes from each instance in original dataset files on
+ one room. **We assume the bbox is aligned with XYZ coordinate.**
+
+ Args:
+ anno_path: path to annotations. e.g. Area_1/office_2/Annotations/
+ out_filename: path to save instance bounding boxes for that room.
+ each line is x1 y1 z1 x2 y2 z2 label,
+ where (x1,y1,z1) is the point on the diagonal closer to origin
+ Returns:
+ None
+ Note:
+ room points are shifted, the most negative point is now at origin.
+ """
+ bbox_label_list = []
+
+ for f in glob.glob(os.path.join(anno_path, '*.txt')):
+ cls = os.path.basename(f).split('_')[0]
+ if cls not in g_classes: # note: in some room there is 'staris' class..
+ cls = 'clutter'
+ points = np.loadtxt(f)
+ label = g_class2label[cls]
+ # Compute tightest axis aligned bounding box
+ xyz_min = np.amin(points[:, 0:3], axis=0)
+ xyz_max = np.amax(points[:, 0:3], axis=0)
+ ins_bbox_label = np.expand_dims(
+ np.concatenate([xyz_min, xyz_max, np.array([label])], 0), 0)
+ bbox_label_list.append(ins_bbox_label)
+
+ bbox_label = np.concatenate(bbox_label_list, 0)
+ room_xyz_min = np.amin(bbox_label[:, 0:3], axis=0)
+ bbox_label[:, 0:3] -= room_xyz_min
+ bbox_label[:, 3:6] -= room_xyz_min
+
+ fout = open(out_filename, 'w')
+ for i in range(bbox_label.shape[0]):
+ fout.write('%f %f %f %f %f %f %d\n' % \
+ (bbox_label[i,0], bbox_label[i,1], bbox_label[i,2],
+ bbox_label[i,3], bbox_label[i,4], bbox_label[i,5],
+ bbox_label[i,6]))
+ fout.close()
+
+def bbox_label_to_obj(input_filename, out_filename_prefix, easy_view=False):
+ """ Visualization of bounding boxes.
+
+ Args:
+ input_filename: each line is x1 y1 z1 x2 y2 z2 label
+ out_filename_prefix: OBJ filename prefix,
+ visualize object by g_label2color
+ easy_view: if True, only visualize furniture and floor
+ Returns:
+ output a list of OBJ file and MTL files with the same prefix
+ """
+ bbox_label = np.loadtxt(input_filename)
+ bbox = bbox_label[:, 0:6]
+ label = bbox_label[:, -1].astype(int)
+ v_cnt = 0 # count vertex
+ ins_cnt = 0 # count instance
+ for i in range(bbox.shape[0]):
+ if easy_view and (label[i] not in g_easy_view_labels):
+ continue
+ obj_filename = out_filename_prefix+'_'+g_classes[label[i]]+'_'+str(ins_cnt)+'.obj'
+ mtl_filename = out_filename_prefix+'_'+g_classes[label[i]]+'_'+str(ins_cnt)+'.mtl'
+ fout_obj = open(obj_filename, 'w')
+ fout_mtl = open(mtl_filename, 'w')
+ fout_obj.write('mtllib %s\n' % (os.path.basename(mtl_filename)))
+
+ length = bbox[i, 3:6] - bbox[i, 0:3]
+ a = length[0]
+ b = length[1]
+ c = length[2]
+ x = bbox[i, 0]
+ y = bbox[i, 1]
+ z = bbox[i, 2]
+ color = np.array(g_label2color[label[i]], dtype=float) / 255.0
+
+ material = 'material%d' % (ins_cnt)
+ fout_obj.write('usemtl %s\n' % (material))
+ fout_obj.write('v %f %f %f\n' % (x,y,z+c))
+ fout_obj.write('v %f %f %f\n' % (x,y+b,z+c))
+ fout_obj.write('v %f %f %f\n' % (x+a,y+b,z+c))
+ fout_obj.write('v %f %f %f\n' % (x+a,y,z+c))
+ fout_obj.write('v %f %f %f\n' % (x,y,z))
+ fout_obj.write('v %f %f %f\n' % (x,y+b,z))
+ fout_obj.write('v %f %f %f\n' % (x+a,y+b,z))
+ fout_obj.write('v %f %f %f\n' % (x+a,y,z))
+ fout_obj.write('g default\n')
+ v_cnt = 0 # for individual box
+ fout_obj.write('f %d %d %d %d\n' % (4+v_cnt, 3+v_cnt, 2+v_cnt, 1+v_cnt))
+ fout_obj.write('f %d %d %d %d\n' % (1+v_cnt, 2+v_cnt, 6+v_cnt, 5+v_cnt))
+ fout_obj.write('f %d %d %d %d\n' % (7+v_cnt, 6+v_cnt, 2+v_cnt, 3+v_cnt))
+ fout_obj.write('f %d %d %d %d\n' % (4+v_cnt, 8+v_cnt, 7+v_cnt, 3+v_cnt))
+ fout_obj.write('f %d %d %d %d\n' % (5+v_cnt, 8+v_cnt, 4+v_cnt, 1+v_cnt))
+ fout_obj.write('f %d %d %d %d\n' % (5+v_cnt, 6+v_cnt, 7+v_cnt, 8+v_cnt))
+ fout_obj.write('\n')
+
+ fout_mtl.write('newmtl %s\n' % (material))
+ fout_mtl.write('Kd %f %f %f\n' % (color[0], color[1], color[2]))
+ fout_mtl.write('\n')
+ fout_obj.close()
+ fout_mtl.close()
+
+ v_cnt += 8
+ ins_cnt += 1
+
+def bbox_label_to_obj_room(input_filename, out_filename_prefix, easy_view=False, permute=None, center=False, exclude_table=False):
+ """ Visualization of bounding boxes.
+
+ Args:
+ input_filename: each line is x1 y1 z1 x2 y2 z2 label
+ out_filename_prefix: OBJ filename prefix,
+ visualize object by g_label2color
+ easy_view: if True, only visualize furniture and floor
+ permute: if not None, permute XYZ for rendering, e.g. [0 2 1]
+ center: if True, move obj to have zero origin
+ Returns:
+ output a list of OBJ file and MTL files with the same prefix
+ """
+ bbox_label = np.loadtxt(input_filename)
+ bbox = bbox_label[:, 0:6]
+ if permute is not None:
+ assert(len(permute)==3)
+ permute = np.array(permute)
+ bbox[:,0:3] = bbox[:,permute]
+ bbox[:,3:6] = bbox[:,permute+3]
+ if center:
+ xyz_max = np.amax(bbox[:,3:6], 0)
+ bbox[:,0:3] -= (xyz_max/2.0)
+ bbox[:,3:6] -= (xyz_max/2.0)
+ bbox /= np.max(xyz_max/2.0)
+ label = bbox_label[:, -1].astype(int)
+ obj_filename = out_filename_prefix+'.obj'
+ mtl_filename = out_filename_prefix+'.mtl'
+
+ fout_obj = open(obj_filename, 'w')
+ fout_mtl = open(mtl_filename, 'w')
+ fout_obj.write('mtllib %s\n' % (os.path.basename(mtl_filename)))
+ v_cnt = 0 # count vertex
+ ins_cnt = 0 # count instance
+ for i in range(bbox.shape[0]):
+ if easy_view and (label[i] not in g_easy_view_labels):
+ continue
+ if exclude_table and label[i] == g_classes.index('table'):
+ continue
+
+ length = bbox[i, 3:6] - bbox[i, 0:3]
+ a = length[0]
+ b = length[1]
+ c = length[2]
+ x = bbox[i, 0]
+ y = bbox[i, 1]
+ z = bbox[i, 2]
+ color = np.array(g_label2color[label[i]], dtype=float) / 255.0
+
+ material = 'material%d' % (ins_cnt)
+ fout_obj.write('usemtl %s\n' % (material))
+ fout_obj.write('v %f %f %f\n' % (x,y,z+c))
+ fout_obj.write('v %f %f %f\n' % (x,y+b,z+c))
+ fout_obj.write('v %f %f %f\n' % (x+a,y+b,z+c))
+ fout_obj.write('v %f %f %f\n' % (x+a,y,z+c))
+ fout_obj.write('v %f %f %f\n' % (x,y,z))
+ fout_obj.write('v %f %f %f\n' % (x,y+b,z))
+ fout_obj.write('v %f %f %f\n' % (x+a,y+b,z))
+ fout_obj.write('v %f %f %f\n' % (x+a,y,z))
+ fout_obj.write('g default\n')
+ fout_obj.write('f %d %d %d %d\n' % (4+v_cnt, 3+v_cnt, 2+v_cnt, 1+v_cnt))
+ fout_obj.write('f %d %d %d %d\n' % (1+v_cnt, 2+v_cnt, 6+v_cnt, 5+v_cnt))
+ fout_obj.write('f %d %d %d %d\n' % (7+v_cnt, 6+v_cnt, 2+v_cnt, 3+v_cnt))
+ fout_obj.write('f %d %d %d %d\n' % (4+v_cnt, 8+v_cnt, 7+v_cnt, 3+v_cnt))
+ fout_obj.write('f %d %d %d %d\n' % (5+v_cnt, 8+v_cnt, 4+v_cnt, 1+v_cnt))
+ fout_obj.write('f %d %d %d %d\n' % (5+v_cnt, 6+v_cnt, 7+v_cnt, 8+v_cnt))
+ fout_obj.write('\n')
+
+ fout_mtl.write('newmtl %s\n' % (material))
+ fout_mtl.write('Kd %f %f %f\n' % (color[0], color[1], color[2]))
+ fout_mtl.write('\n')
+
+ v_cnt += 8
+ ins_cnt += 1
+
+ fout_obj.close()
+ fout_mtl.close()
+
+
+def collect_point_bounding_box(anno_path, out_filename, file_format):
+ """ Compute bounding boxes from each instance in original dataset files on
+ one room. **We assume the bbox is aligned with XYZ coordinate.**
+ Save both the point XYZRGB and the bounding box for the point's
+ parent element.
+
+ Args:
+ anno_path: path to annotations. e.g. Area_1/office_2/Annotations/
+ out_filename: path to save instance bounding boxes for each point,
+ plus the point's XYZRGBL
+ each line is XYZRGBL offsetX offsetY offsetZ a b c,
+ where cx = X+offsetX, cy=X+offsetY, cz=Z+offsetZ
+ where (cx,cy,cz) is center of the box, a,b,c are distances from center
+ to the surfaces of the box, i.e. x1 = cx-a, x2 = cx+a, y1=cy-b etc.
+ file_format: output file format, txt or numpy
+ Returns:
+ None
+
+ Note:
+ room points are shifted, the most negative point is now at origin.
+ """
+ point_bbox_list = []
+
+ for f in glob.glob(os.path.join(anno_path, '*.txt')):
+ cls = os.path.basename(f).split('_')[0]
+ if cls not in g_classes: # note: in some room there is 'staris' class..
+ cls = 'clutter'
+ points = np.loadtxt(f) # Nx6
+ label = g_class2label[cls] # N,
+ # Compute tightest axis aligned bounding box
+ xyz_min = np.amin(points[:, 0:3], axis=0) # 3,
+ xyz_max = np.amax(points[:, 0:3], axis=0) # 3,
+ xyz_center = (xyz_min + xyz_max) / 2
+ dimension = (xyz_max - xyz_min) / 2
+
+ xyz_offsets = xyz_center - points[:,0:3] # Nx3
+ dimensions = np.ones((points.shape[0],3)) * dimension # Nx3
+ labels = np.ones((points.shape[0],1)) * label # N
+ point_bbox_list.append(np.concatenate([points, labels,
+ xyz_offsets, dimensions], 1)) # Nx13
+
+ point_bbox = np.concatenate(point_bbox_list, 0) # KxNx13
+ room_xyz_min = np.amin(point_bbox[:, 0:3], axis=0)
+ point_bbox[:, 0:3] -= room_xyz_min
+
+ if file_format == 'txt':
+ fout = open(out_filename, 'w')
+ for i in range(point_bbox.shape[0]):
+ fout.write('%f %f %f %d %d %d %d %f %f %f %f %f %f\n' % \
+ (point_bbox[i,0], point_bbox[i,1], point_bbox[i,2],
+ point_bbox[i,3], point_bbox[i,4], point_bbox[i,5],
+ point_bbox[i,6],
+ point_bbox[i,7], point_bbox[i,8], point_bbox[i,9],
+ point_bbox[i,10], point_bbox[i,11], point_bbox[i,12]))
+
+ fout.close()
+ elif file_format == 'numpy':
+ np.save(out_filename, point_bbox)
+ else:
+ print('ERROR!! Unknown file format: %s, please use txt or numpy.' % \
+ (file_format))
+ exit()
+
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/meta/anno_paths.txt b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/meta/anno_paths.txt
new file mode 100644
index 0000000000..0ad2f25994
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/meta/anno_paths.txt
@@ -0,0 +1,272 @@
+Area_1/conferenceRoom_1/Annotations
+Area_1/conferenceRoom_2/Annotations
+Area_1/copyRoom_1/Annotations
+Area_1/hallway_1/Annotations
+Area_1/hallway_2/Annotations
+Area_1/hallway_3/Annotations
+Area_1/hallway_4/Annotations
+Area_1/hallway_5/Annotations
+Area_1/hallway_6/Annotations
+Area_1/hallway_7/Annotations
+Area_1/hallway_8/Annotations
+Area_1/office_10/Annotations
+Area_1/office_11/Annotations
+Area_1/office_12/Annotations
+Area_1/office_13/Annotations
+Area_1/office_14/Annotations
+Area_1/office_15/Annotations
+Area_1/office_16/Annotations
+Area_1/office_17/Annotations
+Area_1/office_18/Annotations
+Area_1/office_19/Annotations
+Area_1/office_1/Annotations
+Area_1/office_20/Annotations
+Area_1/office_21/Annotations
+Area_1/office_22/Annotations
+Area_1/office_23/Annotations
+Area_1/office_24/Annotations
+Area_1/office_25/Annotations
+Area_1/office_26/Annotations
+Area_1/office_27/Annotations
+Area_1/office_28/Annotations
+Area_1/office_29/Annotations
+Area_1/office_2/Annotations
+Area_1/office_30/Annotations
+Area_1/office_31/Annotations
+Area_1/office_3/Annotations
+Area_1/office_4/Annotations
+Area_1/office_5/Annotations
+Area_1/office_6/Annotations
+Area_1/office_7/Annotations
+Area_1/office_8/Annotations
+Area_1/office_9/Annotations
+Area_1/pantry_1/Annotations
+Area_1/WC_1/Annotations
+Area_2/auditorium_1/Annotations
+Area_2/auditorium_2/Annotations
+Area_2/conferenceRoom_1/Annotations
+Area_2/hallway_10/Annotations
+Area_2/hallway_11/Annotations
+Area_2/hallway_12/Annotations
+Area_2/hallway_1/Annotations
+Area_2/hallway_2/Annotations
+Area_2/hallway_3/Annotations
+Area_2/hallway_4/Annotations
+Area_2/hallway_5/Annotations
+Area_2/hallway_6/Annotations
+Area_2/hallway_7/Annotations
+Area_2/hallway_8/Annotations
+Area_2/hallway_9/Annotations
+Area_2/office_10/Annotations
+Area_2/office_11/Annotations
+Area_2/office_12/Annotations
+Area_2/office_13/Annotations
+Area_2/office_14/Annotations
+Area_2/office_1/Annotations
+Area_2/office_2/Annotations
+Area_2/office_3/Annotations
+Area_2/office_4/Annotations
+Area_2/office_5/Annotations
+Area_2/office_6/Annotations
+Area_2/office_7/Annotations
+Area_2/office_8/Annotations
+Area_2/office_9/Annotations
+Area_2/storage_1/Annotations
+Area_2/storage_2/Annotations
+Area_2/storage_3/Annotations
+Area_2/storage_4/Annotations
+Area_2/storage_5/Annotations
+Area_2/storage_6/Annotations
+Area_2/storage_7/Annotations
+Area_2/storage_8/Annotations
+Area_2/storage_9/Annotations
+Area_2/WC_1/Annotations
+Area_2/WC_2/Annotations
+Area_3/conferenceRoom_1/Annotations
+Area_3/hallway_1/Annotations
+Area_3/hallway_2/Annotations
+Area_3/hallway_3/Annotations
+Area_3/hallway_4/Annotations
+Area_3/hallway_5/Annotations
+Area_3/hallway_6/Annotations
+Area_3/lounge_1/Annotations
+Area_3/lounge_2/Annotations
+Area_3/office_10/Annotations
+Area_3/office_1/Annotations
+Area_3/office_2/Annotations
+Area_3/office_3/Annotations
+Area_3/office_4/Annotations
+Area_3/office_5/Annotations
+Area_3/office_6/Annotations
+Area_3/office_7/Annotations
+Area_3/office_8/Annotations
+Area_3/office_9/Annotations
+Area_3/storage_1/Annotations
+Area_3/storage_2/Annotations
+Area_3/WC_1/Annotations
+Area_3/WC_2/Annotations
+Area_4/conferenceRoom_1/Annotations
+Area_4/conferenceRoom_2/Annotations
+Area_4/conferenceRoom_3/Annotations
+Area_4/hallway_10/Annotations
+Area_4/hallway_11/Annotations
+Area_4/hallway_12/Annotations
+Area_4/hallway_13/Annotations
+Area_4/hallway_14/Annotations
+Area_4/hallway_1/Annotations
+Area_4/hallway_2/Annotations
+Area_4/hallway_3/Annotations
+Area_4/hallway_4/Annotations
+Area_4/hallway_5/Annotations
+Area_4/hallway_6/Annotations
+Area_4/hallway_7/Annotations
+Area_4/hallway_8/Annotations
+Area_4/hallway_9/Annotations
+Area_4/lobby_1/Annotations
+Area_4/lobby_2/Annotations
+Area_4/office_10/Annotations
+Area_4/office_11/Annotations
+Area_4/office_12/Annotations
+Area_4/office_13/Annotations
+Area_4/office_14/Annotations
+Area_4/office_15/Annotations
+Area_4/office_16/Annotations
+Area_4/office_17/Annotations
+Area_4/office_18/Annotations
+Area_4/office_19/Annotations
+Area_4/office_1/Annotations
+Area_4/office_20/Annotations
+Area_4/office_21/Annotations
+Area_4/office_22/Annotations
+Area_4/office_2/Annotations
+Area_4/office_3/Annotations
+Area_4/office_4/Annotations
+Area_4/office_5/Annotations
+Area_4/office_6/Annotations
+Area_4/office_7/Annotations
+Area_4/office_8/Annotations
+Area_4/office_9/Annotations
+Area_4/storage_1/Annotations
+Area_4/storage_2/Annotations
+Area_4/storage_3/Annotations
+Area_4/storage_4/Annotations
+Area_4/WC_1/Annotations
+Area_4/WC_2/Annotations
+Area_4/WC_3/Annotations
+Area_4/WC_4/Annotations
+Area_5/conferenceRoom_1/Annotations
+Area_5/conferenceRoom_2/Annotations
+Area_5/conferenceRoom_3/Annotations
+Area_5/hallway_10/Annotations
+Area_5/hallway_11/Annotations
+Area_5/hallway_12/Annotations
+Area_5/hallway_13/Annotations
+Area_5/hallway_14/Annotations
+Area_5/hallway_15/Annotations
+Area_5/hallway_1/Annotations
+Area_5/hallway_2/Annotations
+Area_5/hallway_3/Annotations
+Area_5/hallway_4/Annotations
+Area_5/hallway_5/Annotations
+Area_5/hallway_6/Annotations
+Area_5/hallway_7/Annotations
+Area_5/hallway_8/Annotations
+Area_5/hallway_9/Annotations
+Area_5/lobby_1/Annotations
+Area_5/office_10/Annotations
+Area_5/office_11/Annotations
+Area_5/office_12/Annotations
+Area_5/office_13/Annotations
+Area_5/office_14/Annotations
+Area_5/office_15/Annotations
+Area_5/office_16/Annotations
+Area_5/office_17/Annotations
+Area_5/office_18/Annotations
+Area_5/office_19/Annotations
+Area_5/office_1/Annotations
+Area_5/office_20/Annotations
+Area_5/office_21/Annotations
+Area_5/office_22/Annotations
+Area_5/office_23/Annotations
+Area_5/office_24/Annotations
+Area_5/office_25/Annotations
+Area_5/office_26/Annotations
+Area_5/office_27/Annotations
+Area_5/office_28/Annotations
+Area_5/office_29/Annotations
+Area_5/office_2/Annotations
+Area_5/office_30/Annotations
+Area_5/office_31/Annotations
+Area_5/office_32/Annotations
+Area_5/office_33/Annotations
+Area_5/office_34/Annotations
+Area_5/office_35/Annotations
+Area_5/office_36/Annotations
+Area_5/office_37/Annotations
+Area_5/office_38/Annotations
+Area_5/office_39/Annotations
+Area_5/office_3/Annotations
+Area_5/office_40/Annotations
+Area_5/office_41/Annotations
+Area_5/office_42/Annotations
+Area_5/office_4/Annotations
+Area_5/office_5/Annotations
+Area_5/office_6/Annotations
+Area_5/office_7/Annotations
+Area_5/office_8/Annotations
+Area_5/office_9/Annotations
+Area_5/pantry_1/Annotations
+Area_5/storage_1/Annotations
+Area_5/storage_2/Annotations
+Area_5/storage_3/Annotations
+Area_5/storage_4/Annotations
+Area_5/WC_1/Annotations
+Area_5/WC_2/Annotations
+Area_6/conferenceRoom_1/Annotations
+Area_6/copyRoom_1/Annotations
+Area_6/hallway_1/Annotations
+Area_6/hallway_2/Annotations
+Area_6/hallway_3/Annotations
+Area_6/hallway_4/Annotations
+Area_6/hallway_5/Annotations
+Area_6/hallway_6/Annotations
+Area_6/lounge_1/Annotations
+Area_6/office_10/Annotations
+Area_6/office_11/Annotations
+Area_6/office_12/Annotations
+Area_6/office_13/Annotations
+Area_6/office_14/Annotations
+Area_6/office_15/Annotations
+Area_6/office_16/Annotations
+Area_6/office_17/Annotations
+Area_6/office_18/Annotations
+Area_6/office_19/Annotations
+Area_6/office_1/Annotations
+Area_6/office_20/Annotations
+Area_6/office_21/Annotations
+Area_6/office_22/Annotations
+Area_6/office_23/Annotations
+Area_6/office_24/Annotations
+Area_6/office_25/Annotations
+Area_6/office_26/Annotations
+Area_6/office_27/Annotations
+Area_6/office_28/Annotations
+Area_6/office_29/Annotations
+Area_6/office_2/Annotations
+Area_6/office_30/Annotations
+Area_6/office_31/Annotations
+Area_6/office_32/Annotations
+Area_6/office_33/Annotations
+Area_6/office_34/Annotations
+Area_6/office_35/Annotations
+Area_6/office_36/Annotations
+Area_6/office_37/Annotations
+Area_6/office_3/Annotations
+Area_6/office_4/Annotations
+Area_6/office_5/Annotations
+Area_6/office_6/Annotations
+Area_6/office_7/Annotations
+Area_6/office_8/Annotations
+Area_6/office_9/Annotations
+Area_6/openspace_1/Annotations
+Area_6/pantry_1/Annotations
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/meta/class_names.txt b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/meta/class_names.txt
new file mode 100644
index 0000000000..ca1d178828
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/data_utils/meta/class_names.txt
@@ -0,0 +1,13 @@
+ceiling
+floor
+wall
+beam
+column
+window
+door
+table
+chair
+sofa
+bookcase
+board
+clutter
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_msg_normals/checkpoints/best_model.pth b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
new file mode 100644
index 0000000000..a427516104
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_msg_normals/checkpoints/best_model.pth differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_msg_normals/logs/pointnet2_cls_msg.txt b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_msg_normals/logs/pointnet2_cls_msg.txt
new file mode 100644
index 0000000000..a2e19191b6
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_msg_normals/logs/pointnet2_cls_msg.txt
@@ -0,0 +1,852 @@
+2021-03-26 20:54:11,106 - Model - INFO - PARAMETER ...
+2021-03-26 20:54:11,106 - Model - INFO - Namespace(batch_size=24, decay_rate=0.0001, epoch=200, gpu='2', learning_rate=0.001, log_dir='pointnet2_msg_normals', model='pointnet2_cls_msg', num_category=40, num_point=1024, optimizer='Adam', process_data=True, use_cpu=False, use_normals=True, use_uniform_sample=False)
+2021-03-26 20:54:11,106 - Model - INFO - Load dataset ...
+2021-03-26 20:54:33,594 - Model - INFO - No existing model, starting training from scratch...
+2021-03-26 20:54:33,596 - Model - INFO - Start training...
+2021-03-26 20:54:33,596 - Model - INFO - Epoch 1 (1/200):
+2021-03-26 20:57:40,380 - Model - INFO - Train Instance Accuracy: 0.571037
+2021-03-26 20:58:16,623 - Model - INFO - Test Instance Accuracy: 0.718528, Class Accuracy: 0.627357
+2021-03-26 20:58:16,623 - Model - INFO - Best Instance Accuracy: 0.718528, Class Accuracy: 0.627357
+2021-03-26 20:58:16,623 - Model - INFO - Save model...
+2021-03-26 20:58:16,623 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-26 20:58:16,698 - Model - INFO - Epoch 2 (2/200):
+2021-03-26 21:01:26,685 - Model - INFO - Train Instance Accuracy: 0.727947
+2021-03-26 21:02:03,642 - Model - INFO - Test Instance Accuracy: 0.790858, Class Accuracy: 0.702316
+2021-03-26 21:02:03,642 - Model - INFO - Best Instance Accuracy: 0.790858, Class Accuracy: 0.702316
+2021-03-26 21:02:03,642 - Model - INFO - Save model...
+2021-03-26 21:02:03,643 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-26 21:02:03,746 - Model - INFO - Epoch 3 (3/200):
+2021-03-26 21:05:15,349 - Model - INFO - Train Instance Accuracy: 0.781606
+2021-03-26 21:05:51,538 - Model - INFO - Test Instance Accuracy: 0.803641, Class Accuracy: 0.738575
+2021-03-26 21:05:51,538 - Model - INFO - Best Instance Accuracy: 0.803641, Class Accuracy: 0.738575
+2021-03-26 21:05:51,539 - Model - INFO - Save model...
+2021-03-26 21:05:51,539 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-26 21:05:51,623 - Model - INFO - Epoch 4 (4/200):
+2021-03-26 21:08:58,757 - Model - INFO - Train Instance Accuracy: 0.803354
+2021-03-26 21:09:34,976 - Model - INFO - Test Instance Accuracy: 0.856230, Class Accuracy: 0.790368
+2021-03-26 21:09:34,977 - Model - INFO - Best Instance Accuracy: 0.856230, Class Accuracy: 0.790368
+2021-03-26 21:09:34,977 - Model - INFO - Save model...
+2021-03-26 21:09:34,977 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-26 21:09:35,060 - Model - INFO - Epoch 5 (5/200):
+2021-03-26 21:12:43,003 - Model - INFO - Train Instance Accuracy: 0.816667
+2021-03-26 21:13:18,956 - Model - INFO - Test Instance Accuracy: 0.864401, Class Accuracy: 0.780697
+2021-03-26 21:13:18,956 - Model - INFO - Best Instance Accuracy: 0.864401, Class Accuracy: 0.790368
+2021-03-26 21:13:18,956 - Model - INFO - Save model...
+2021-03-26 21:13:18,956 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-26 21:13:19,048 - Model - INFO - Epoch 6 (6/200):
+2021-03-26 21:16:25,320 - Model - INFO - Train Instance Accuracy: 0.828557
+2021-03-26 21:17:00,926 - Model - INFO - Test Instance Accuracy: 0.830825, Class Accuracy: 0.764993
+2021-03-26 21:17:00,926 - Model - INFO - Best Instance Accuracy: 0.864401, Class Accuracy: 0.790368
+2021-03-26 21:17:00,927 - Model - INFO - Epoch 7 (7/200):
+2021-03-26 21:20:08,170 - Model - INFO - Train Instance Accuracy: 0.847561
+2021-03-26 21:20:46,723 - Model - INFO - Test Instance Accuracy: 0.865858, Class Accuracy: 0.822333
+2021-03-26 21:20:46,724 - Model - INFO - Best Instance Accuracy: 0.865858, Class Accuracy: 0.822333
+2021-03-26 21:20:46,724 - Model - INFO - Save model...
+2021-03-26 21:20:46,724 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-26 21:20:47,124 - Model - INFO - Epoch 8 (8/200):
+2021-03-26 21:23:58,332 - Model - INFO - Train Instance Accuracy: 0.846850
+2021-03-26 21:24:35,002 - Model - INFO - Test Instance Accuracy: 0.882282, Class Accuracy: 0.823991
+2021-03-26 21:24:35,002 - Model - INFO - Best Instance Accuracy: 0.882282, Class Accuracy: 0.823991
+2021-03-26 21:24:35,003 - Model - INFO - Save model...
+2021-03-26 21:24:35,003 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-26 21:24:35,117 - Model - INFO - Epoch 9 (9/200):
+2021-03-26 21:27:43,289 - Model - INFO - Train Instance Accuracy: 0.855691
+2021-03-26 21:28:20,254 - Model - INFO - Test Instance Accuracy: 0.886327, Class Accuracy: 0.839216
+2021-03-26 21:28:20,255 - Model - INFO - Best Instance Accuracy: 0.886327, Class Accuracy: 0.839216
+2021-03-26 21:28:20,255 - Model - INFO - Save model...
+2021-03-26 21:28:20,255 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-26 21:28:20,345 - Model - INFO - Epoch 10 (10/200):
+2021-03-26 21:31:29,863 - Model - INFO - Train Instance Accuracy: 0.863110
+2021-03-26 21:32:05,643 - Model - INFO - Test Instance Accuracy: 0.863107, Class Accuracy: 0.815547
+2021-03-26 21:32:05,643 - Model - INFO - Best Instance Accuracy: 0.886327, Class Accuracy: 0.839216
+2021-03-26 21:32:05,643 - Model - INFO - Epoch 11 (11/200):
+2021-03-26 21:35:14,710 - Model - INFO - Train Instance Accuracy: 0.858740
+2021-03-26 21:35:50,436 - Model - INFO - Test Instance Accuracy: 0.875890, Class Accuracy: 0.818452
+2021-03-26 21:35:50,437 - Model - INFO - Best Instance Accuracy: 0.886327, Class Accuracy: 0.839216
+2021-03-26 21:35:50,437 - Model - INFO - Epoch 12 (12/200):
+2021-03-26 21:38:58,548 - Model - INFO - Train Instance Accuracy: 0.867683
+2021-03-26 21:39:36,119 - Model - INFO - Test Instance Accuracy: 0.874110, Class Accuracy: 0.821763
+2021-03-26 21:39:36,119 - Model - INFO - Best Instance Accuracy: 0.886327, Class Accuracy: 0.839216
+2021-03-26 21:39:36,119 - Model - INFO - Epoch 13 (13/200):
+2021-03-26 21:42:48,091 - Model - INFO - Train Instance Accuracy: 0.867785
+2021-03-26 21:43:24,953 - Model - INFO - Test Instance Accuracy: 0.900647, Class Accuracy: 0.855812
+2021-03-26 21:43:24,953 - Model - INFO - Best Instance Accuracy: 0.900647, Class Accuracy: 0.855812
+2021-03-26 21:43:24,953 - Model - INFO - Save model...
+2021-03-26 21:43:24,953 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-26 21:43:25,035 - Model - INFO - Epoch 14 (14/200):
+2021-03-26 21:46:34,097 - Model - INFO - Train Instance Accuracy: 0.874187
+2021-03-26 21:47:10,364 - Model - INFO - Test Instance Accuracy: 0.883819, Class Accuracy: 0.820988
+2021-03-26 21:47:10,364 - Model - INFO - Best Instance Accuracy: 0.900647, Class Accuracy: 0.855812
+2021-03-26 21:47:10,365 - Model - INFO - Epoch 15 (15/200):
+2021-03-26 21:50:19,396 - Model - INFO - Train Instance Accuracy: 0.870732
+2021-03-26 21:50:56,062 - Model - INFO - Test Instance Accuracy: 0.873382, Class Accuracy: 0.837581
+2021-03-26 21:50:56,063 - Model - INFO - Best Instance Accuracy: 0.900647, Class Accuracy: 0.855812
+2021-03-26 21:50:56,063 - Model - INFO - Epoch 16 (16/200):
+2021-03-26 21:54:03,232 - Model - INFO - Train Instance Accuracy: 0.881504
+2021-03-26 21:54:38,962 - Model - INFO - Test Instance Accuracy: 0.886408, Class Accuracy: 0.846747
+2021-03-26 21:54:38,963 - Model - INFO - Best Instance Accuracy: 0.900647, Class Accuracy: 0.855812
+2021-03-26 21:54:38,963 - Model - INFO - Epoch 17 (17/200):
+2021-03-26 21:57:46,429 - Model - INFO - Train Instance Accuracy: 0.885874
+2021-03-26 21:58:22,575 - Model - INFO - Test Instance Accuracy: 0.899757, Class Accuracy: 0.868086
+2021-03-26 21:58:22,576 - Model - INFO - Best Instance Accuracy: 0.900647, Class Accuracy: 0.868086
+2021-03-26 21:58:22,576 - Model - INFO - Epoch 18 (18/200):
+2021-03-26 22:01:32,638 - Model - INFO - Train Instance Accuracy: 0.877744
+2021-03-26 22:02:09,306 - Model - INFO - Test Instance Accuracy: 0.906472, Class Accuracy: 0.869513
+2021-03-26 22:02:09,307 - Model - INFO - Best Instance Accuracy: 0.906472, Class Accuracy: 0.869513
+2021-03-26 22:02:09,307 - Model - INFO - Save model...
+2021-03-26 22:02:09,307 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-26 22:02:09,711 - Model - INFO - Epoch 19 (19/200):
+2021-03-26 22:05:20,767 - Model - INFO - Train Instance Accuracy: 0.882317
+2021-03-26 22:05:56,741 - Model - INFO - Test Instance Accuracy: 0.877832, Class Accuracy: 0.834486
+2021-03-26 22:05:56,742 - Model - INFO - Best Instance Accuracy: 0.906472, Class Accuracy: 0.869513
+2021-03-26 22:05:56,742 - Model - INFO - Epoch 20 (20/200):
+2021-03-26 22:09:07,947 - Model - INFO - Train Instance Accuracy: 0.902541
+2021-03-26 22:09:44,219 - Model - INFO - Test Instance Accuracy: 0.912136, Class Accuracy: 0.879158
+2021-03-26 22:09:44,219 - Model - INFO - Best Instance Accuracy: 0.912136, Class Accuracy: 0.879158
+2021-03-26 22:09:44,219 - Model - INFO - Save model...
+2021-03-26 22:09:44,219 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-26 22:09:44,304 - Model - INFO - Epoch 21 (21/200):
+2021-03-26 22:12:54,359 - Model - INFO - Train Instance Accuracy: 0.904573
+2021-03-26 22:13:30,840 - Model - INFO - Test Instance Accuracy: 0.896602, Class Accuracy: 0.871467
+2021-03-26 22:13:30,840 - Model - INFO - Best Instance Accuracy: 0.912136, Class Accuracy: 0.879158
+2021-03-26 22:13:30,840 - Model - INFO - Epoch 22 (22/200):
+2021-03-26 22:16:38,725 - Model - INFO - Train Instance Accuracy: 0.904878
+2021-03-26 22:17:15,294 - Model - INFO - Test Instance Accuracy: 0.907362, Class Accuracy: 0.868552
+2021-03-26 22:17:15,294 - Model - INFO - Best Instance Accuracy: 0.912136, Class Accuracy: 0.879158
+2021-03-26 22:17:15,295 - Model - INFO - Epoch 23 (23/200):
+2021-03-26 22:20:24,049 - Model - INFO - Train Instance Accuracy: 0.904980
+2021-03-26 22:21:01,033 - Model - INFO - Test Instance Accuracy: 0.914725, Class Accuracy: 0.874573
+2021-03-26 22:21:01,033 - Model - INFO - Best Instance Accuracy: 0.914725, Class Accuracy: 0.879158
+2021-03-26 22:21:01,033 - Model - INFO - Save model...
+2021-03-26 22:21:01,033 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-26 22:21:01,122 - Model - INFO - Epoch 24 (24/200):
+2021-03-26 22:24:11,711 - Model - INFO - Train Instance Accuracy: 0.910671
+2021-03-26 22:24:49,003 - Model - INFO - Test Instance Accuracy: 0.896359, Class Accuracy: 0.873731
+2021-03-26 22:24:49,004 - Model - INFO - Best Instance Accuracy: 0.914725, Class Accuracy: 0.879158
+2021-03-26 22:24:49,004 - Model - INFO - Epoch 25 (25/200):
+2021-03-26 22:27:57,912 - Model - INFO - Train Instance Accuracy: 0.905996
+2021-03-26 22:28:34,172 - Model - INFO - Test Instance Accuracy: 0.912702, Class Accuracy: 0.886490
+2021-03-26 22:28:34,172 - Model - INFO - Best Instance Accuracy: 0.914725, Class Accuracy: 0.886490
+2021-03-26 22:28:34,173 - Model - INFO - Epoch 26 (26/200):
+2021-03-26 22:31:43,785 - Model - INFO - Train Instance Accuracy: 0.912398
+2021-03-26 22:32:20,183 - Model - INFO - Test Instance Accuracy: 0.908900, Class Accuracy: 0.869426
+2021-03-26 22:32:20,183 - Model - INFO - Best Instance Accuracy: 0.914725, Class Accuracy: 0.886490
+2021-03-26 22:32:20,183 - Model - INFO - Epoch 27 (27/200):
+2021-03-26 22:35:27,622 - Model - INFO - Train Instance Accuracy: 0.910671
+2021-03-26 22:36:03,138 - Model - INFO - Test Instance Accuracy: 0.907120, Class Accuracy: 0.880002
+2021-03-26 22:36:03,139 - Model - INFO - Best Instance Accuracy: 0.914725, Class Accuracy: 0.886490
+2021-03-26 22:36:03,139 - Model - INFO - Epoch 28 (28/200):
+2021-03-26 22:39:10,245 - Model - INFO - Train Instance Accuracy: 0.910772
+2021-03-26 22:39:46,956 - Model - INFO - Test Instance Accuracy: 0.913026, Class Accuracy: 0.871528
+2021-03-26 22:39:46,956 - Model - INFO - Best Instance Accuracy: 0.914725, Class Accuracy: 0.886490
+2021-03-26 22:39:46,957 - Model - INFO - Epoch 29 (29/200):
+2021-03-26 22:42:59,025 - Model - INFO - Train Instance Accuracy: 0.910976
+2021-03-26 22:43:36,125 - Model - INFO - Test Instance Accuracy: 0.912055, Class Accuracy: 0.870143
+2021-03-26 22:43:36,127 - Model - INFO - Best Instance Accuracy: 0.914725, Class Accuracy: 0.886490
+2021-03-26 22:43:36,127 - Model - INFO - Epoch 30 (30/200):
+2021-03-26 22:46:45,436 - Model - INFO - Train Instance Accuracy: 0.915955
+2021-03-26 22:47:21,587 - Model - INFO - Test Instance Accuracy: 0.905583, Class Accuracy: 0.891074
+2021-03-26 22:47:21,587 - Model - INFO - Best Instance Accuracy: 0.914725, Class Accuracy: 0.891074
+2021-03-26 22:47:21,587 - Model - INFO - Epoch 31 (31/200):
+2021-03-26 22:50:31,024 - Model - INFO - Train Instance Accuracy: 0.911179
+2021-03-26 22:51:07,175 - Model - INFO - Test Instance Accuracy: 0.903155, Class Accuracy: 0.863230
+2021-03-26 22:51:07,176 - Model - INFO - Best Instance Accuracy: 0.914725, Class Accuracy: 0.891074
+2021-03-26 22:51:07,176 - Model - INFO - Epoch 32 (32/200):
+2021-03-26 22:54:15,092 - Model - INFO - Train Instance Accuracy: 0.920224
+2021-03-26 22:54:51,066 - Model - INFO - Test Instance Accuracy: 0.907929, Class Accuracy: 0.863417
+2021-03-26 22:54:51,066 - Model - INFO - Best Instance Accuracy: 0.914725, Class Accuracy: 0.891074
+2021-03-26 22:54:51,066 - Model - INFO - Epoch 33 (33/200):
+2021-03-26 22:57:56,800 - Model - INFO - Train Instance Accuracy: 0.918089
+2021-03-26 22:58:33,580 - Model - INFO - Test Instance Accuracy: 0.916424, Class Accuracy: 0.878601
+2021-03-26 22:58:33,583 - Model - INFO - Best Instance Accuracy: 0.916424, Class Accuracy: 0.891074
+2021-03-26 22:58:33,583 - Model - INFO - Save model...
+2021-03-26 22:58:33,584 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-26 22:58:33,680 - Model - INFO - Epoch 34 (34/200):
+2021-03-26 23:01:44,272 - Model - INFO - Train Instance Accuracy: 0.919106
+2021-03-26 23:02:21,731 - Model - INFO - Test Instance Accuracy: 0.901375, Class Accuracy: 0.866229
+2021-03-26 23:02:21,731 - Model - INFO - Best Instance Accuracy: 0.916424, Class Accuracy: 0.891074
+2021-03-26 23:02:21,731 - Model - INFO - Epoch 35 (35/200):
+2021-03-26 23:05:32,231 - Model - INFO - Train Instance Accuracy: 0.918598
+2021-03-26 23:06:08,057 - Model - INFO - Test Instance Accuracy: 0.906796, Class Accuracy: 0.871741
+2021-03-26 23:06:08,058 - Model - INFO - Best Instance Accuracy: 0.916424, Class Accuracy: 0.891074
+2021-03-26 23:06:08,058 - Model - INFO - Epoch 36 (36/200):
+2021-03-26 23:09:17,418 - Model - INFO - Train Instance Accuracy: 0.922256
+2021-03-26 23:09:53,682 - Model - INFO - Test Instance Accuracy: 0.917638, Class Accuracy: 0.875782
+2021-03-26 23:09:53,682 - Model - INFO - Best Instance Accuracy: 0.917638, Class Accuracy: 0.891074
+2021-03-26 23:09:53,683 - Model - INFO - Save model...
+2021-03-26 23:09:53,683 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-26 23:09:53,789 - Model - INFO - Epoch 37 (37/200):
+2021-03-26 23:13:03,138 - Model - INFO - Train Instance Accuracy: 0.919411
+2021-03-26 23:13:39,821 - Model - INFO - Test Instance Accuracy: 0.907201, Class Accuracy: 0.871139
+2021-03-26 23:13:39,821 - Model - INFO - Best Instance Accuracy: 0.917638, Class Accuracy: 0.891074
+2021-03-26 23:13:39,821 - Model - INFO - Epoch 38 (38/200):
+2021-03-26 23:16:46,471 - Model - INFO - Train Instance Accuracy: 0.920224
+2021-03-26 23:17:23,242 - Model - INFO - Test Instance Accuracy: 0.901052, Class Accuracy: 0.881506
+2021-03-26 23:17:23,242 - Model - INFO - Best Instance Accuracy: 0.917638, Class Accuracy: 0.891074
+2021-03-26 23:17:23,242 - Model - INFO - Epoch 39 (39/200):
+2021-03-26 23:20:32,117 - Model - INFO - Train Instance Accuracy: 0.924593
+2021-03-26 23:21:09,040 - Model - INFO - Test Instance Accuracy: 0.913916, Class Accuracy: 0.877208
+2021-03-26 23:21:09,041 - Model - INFO - Best Instance Accuracy: 0.917638, Class Accuracy: 0.891074
+2021-03-26 23:21:09,041 - Model - INFO - Epoch 40 (40/200):
+2021-03-26 23:24:20,137 - Model - INFO - Train Instance Accuracy: 0.936789
+2021-03-26 23:24:56,053 - Model - INFO - Test Instance Accuracy: 0.919903, Class Accuracy: 0.881509
+2021-03-26 23:24:56,053 - Model - INFO - Best Instance Accuracy: 0.919903, Class Accuracy: 0.891074
+2021-03-26 23:24:56,053 - Model - INFO - Save model...
+2021-03-26 23:24:56,053 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-26 23:24:56,416 - Model - INFO - Epoch 41 (41/200):
+2021-03-26 23:28:04,710 - Model - INFO - Train Instance Accuracy: 0.936484
+2021-03-26 23:28:41,411 - Model - INFO - Test Instance Accuracy: 0.921926, Class Accuracy: 0.885638
+2021-03-26 23:28:41,411 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-26 23:28:41,411 - Model - INFO - Save model...
+2021-03-26 23:28:41,411 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-26 23:28:41,518 - Model - INFO - Epoch 42 (42/200):
+2021-03-26 23:31:50,730 - Model - INFO - Train Instance Accuracy: 0.935976
+2021-03-26 23:32:26,499 - Model - INFO - Test Instance Accuracy: 0.914644, Class Accuracy: 0.883458
+2021-03-26 23:32:26,499 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-26 23:32:26,499 - Model - INFO - Epoch 43 (43/200):
+2021-03-26 23:35:33,565 - Model - INFO - Train Instance Accuracy: 0.938923
+2021-03-26 23:36:08,752 - Model - INFO - Test Instance Accuracy: 0.918447, Class Accuracy: 0.885162
+2021-03-26 23:36:08,753 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-26 23:36:08,754 - Model - INFO - Epoch 44 (44/200):
+2021-03-26 23:39:16,052 - Model - INFO - Train Instance Accuracy: 0.937297
+2021-03-26 23:39:52,624 - Model - INFO - Test Instance Accuracy: 0.917880, Class Accuracy: 0.890004
+2021-03-26 23:39:52,625 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-26 23:39:52,625 - Model - INFO - Epoch 45 (45/200):
+2021-03-26 23:43:03,744 - Model - INFO - Train Instance Accuracy: 0.937805
+2021-03-26 23:43:41,077 - Model - INFO - Test Instance Accuracy: 0.909547, Class Accuracy: 0.874115
+2021-03-26 23:43:41,077 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-26 23:43:41,078 - Model - INFO - Epoch 46 (46/200):
+2021-03-26 23:46:50,857 - Model - INFO - Train Instance Accuracy: 0.943902
+2021-03-26 23:47:27,208 - Model - INFO - Test Instance Accuracy: 0.915777, Class Accuracy: 0.884084
+2021-03-26 23:47:27,208 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-26 23:47:27,208 - Model - INFO - Epoch 47 (47/200):
+2021-03-26 23:50:36,685 - Model - INFO - Train Instance Accuracy: 0.940142
+2021-03-26 23:51:14,821 - Model - INFO - Test Instance Accuracy: 0.919498, Class Accuracy: 0.878956
+2021-03-26 23:51:14,822 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-26 23:51:14,822 - Model - INFO - Epoch 48 (48/200):
+2021-03-26 23:54:24,458 - Model - INFO - Train Instance Accuracy: 0.939228
+2021-03-26 23:54:59,905 - Model - INFO - Test Instance Accuracy: 0.921197, Class Accuracy: 0.885728
+2021-03-26 23:54:59,905 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-26 23:54:59,905 - Model - INFO - Epoch 49 (49/200):
+2021-03-26 23:58:06,541 - Model - INFO - Train Instance Accuracy: 0.940955
+2021-03-26 23:58:42,691 - Model - INFO - Test Instance Accuracy: 0.920712, Class Accuracy: 0.886908
+2021-03-26 23:58:42,691 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-26 23:58:42,692 - Model - INFO - Epoch 50 (50/200):
+2021-03-27 00:01:54,122 - Model - INFO - Train Instance Accuracy: 0.942683
+2021-03-27 00:02:31,872 - Model - INFO - Test Instance Accuracy: 0.913511, Class Accuracy: 0.883098
+2021-03-27 00:02:31,872 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-27 00:02:31,872 - Model - INFO - Epoch 51 (51/200):
+2021-03-27 00:05:40,385 - Model - INFO - Train Instance Accuracy: 0.939228
+2021-03-27 00:06:17,440 - Model - INFO - Test Instance Accuracy: 0.915858, Class Accuracy: 0.880032
+2021-03-27 00:06:17,441 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-27 00:06:17,442 - Model - INFO - Epoch 52 (52/200):
+2021-03-27 00:09:26,906 - Model - INFO - Train Instance Accuracy: 0.942175
+2021-03-27 00:10:02,631 - Model - INFO - Test Instance Accuracy: 0.912379, Class Accuracy: 0.875228
+2021-03-27 00:10:02,631 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-27 00:10:02,631 - Model - INFO - Epoch 53 (53/200):
+2021-03-27 00:13:11,579 - Model - INFO - Train Instance Accuracy: 0.942683
+2021-03-27 00:13:47,596 - Model - INFO - Test Instance Accuracy: 0.918123, Class Accuracy: 0.881213
+2021-03-27 00:13:47,597 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-27 00:13:47,597 - Model - INFO - Epoch 54 (54/200):
+2021-03-27 00:16:53,841 - Model - INFO - Train Instance Accuracy: 0.943598
+2021-03-27 00:17:30,426 - Model - INFO - Test Instance Accuracy: 0.910680, Class Accuracy: 0.877488
+2021-03-27 00:17:30,427 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-27 00:17:30,427 - Model - INFO - Epoch 55 (55/200):
+2021-03-27 00:20:42,630 - Model - INFO - Train Instance Accuracy: 0.946443
+2021-03-27 00:21:19,576 - Model - INFO - Test Instance Accuracy: 0.917152, Class Accuracy: 0.877983
+2021-03-27 00:21:19,577 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-27 00:21:19,577 - Model - INFO - Epoch 56 (56/200):
+2021-03-27 00:24:29,960 - Model - INFO - Train Instance Accuracy: 0.943496
+2021-03-27 00:25:05,579 - Model - INFO - Test Instance Accuracy: 0.917880, Class Accuracy: 0.881963
+2021-03-27 00:25:05,580 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-27 00:25:05,581 - Model - INFO - Epoch 57 (57/200):
+2021-03-27 00:28:14,551 - Model - INFO - Train Instance Accuracy: 0.941768
+2021-03-27 00:28:50,283 - Model - INFO - Test Instance Accuracy: 0.915453, Class Accuracy: 0.878803
+2021-03-27 00:28:50,283 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-27 00:28:50,283 - Model - INFO - Epoch 58 (58/200):
+2021-03-27 00:31:59,822 - Model - INFO - Train Instance Accuracy: 0.940244
+2021-03-27 00:32:35,944 - Model - INFO - Test Instance Accuracy: 0.920388, Class Accuracy: 0.888668
+2021-03-27 00:32:35,944 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-27 00:32:35,945 - Model - INFO - Epoch 59 (59/200):
+2021-03-27 00:35:42,721 - Model - INFO - Train Instance Accuracy: 0.941463
+2021-03-27 00:36:19,883 - Model - INFO - Test Instance Accuracy: 0.914887, Class Accuracy: 0.883440
+2021-03-27 00:36:19,884 - Model - INFO - Best Instance Accuracy: 0.921926, Class Accuracy: 0.891074
+2021-03-27 00:36:19,884 - Model - INFO - Epoch 60 (60/200):
+2021-03-27 00:39:30,904 - Model - INFO - Train Instance Accuracy: 0.949593
+2021-03-27 00:40:09,101 - Model - INFO - Test Instance Accuracy: 0.922087, Class Accuracy: 0.891658
+2021-03-27 00:40:09,101 - Model - INFO - Best Instance Accuracy: 0.922087, Class Accuracy: 0.891658
+2021-03-27 00:40:09,101 - Model - INFO - Save model...
+2021-03-27 00:40:09,101 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-27 00:40:09,192 - Model - INFO - Epoch 61 (61/200):
+2021-03-27 00:43:22,509 - Model - INFO - Train Instance Accuracy: 0.955589
+2021-03-27 00:43:58,402 - Model - INFO - Test Instance Accuracy: 0.915210, Class Accuracy: 0.885762
+2021-03-27 00:43:58,403 - Model - INFO - Best Instance Accuracy: 0.922087, Class Accuracy: 0.891658
+2021-03-27 00:43:58,403 - Model - INFO - Epoch 62 (62/200):
+2021-03-27 00:47:06,429 - Model - INFO - Train Instance Accuracy: 0.953862
+2021-03-27 00:47:42,701 - Model - INFO - Test Instance Accuracy: 0.918447, Class Accuracy: 0.891119
+2021-03-27 00:47:42,701 - Model - INFO - Best Instance Accuracy: 0.922087, Class Accuracy: 0.891658
+2021-03-27 00:47:42,702 - Model - INFO - Epoch 63 (63/200):
+2021-03-27 00:50:51,119 - Model - INFO - Train Instance Accuracy: 0.954167
+2021-03-27 00:51:27,332 - Model - INFO - Test Instance Accuracy: 0.920469, Class Accuracy: 0.887824
+2021-03-27 00:51:27,333 - Model - INFO - Best Instance Accuracy: 0.922087, Class Accuracy: 0.891658
+2021-03-27 00:51:27,333 - Model - INFO - Epoch 64 (64/200):
+2021-03-27 00:54:34,953 - Model - INFO - Train Instance Accuracy: 0.954167
+2021-03-27 00:55:10,352 - Model - INFO - Test Instance Accuracy: 0.912702, Class Accuracy: 0.888574
+2021-03-27 00:55:10,353 - Model - INFO - Best Instance Accuracy: 0.922087, Class Accuracy: 0.891658
+2021-03-27 00:55:10,353 - Model - INFO - Epoch 65 (65/200):
+2021-03-27 00:58:18,298 - Model - INFO - Train Instance Accuracy: 0.955183
+2021-03-27 00:58:55,588 - Model - INFO - Test Instance Accuracy: 0.908900, Class Accuracy: 0.877223
+2021-03-27 00:58:55,588 - Model - INFO - Best Instance Accuracy: 0.922087, Class Accuracy: 0.891658
+2021-03-27 00:58:55,589 - Model - INFO - Epoch 66 (66/200):
+2021-03-27 01:02:07,595 - Model - INFO - Train Instance Accuracy: 0.958333
+2021-03-27 01:02:43,991 - Model - INFO - Test Instance Accuracy: 0.923301, Class Accuracy: 0.896029
+2021-03-27 01:02:43,991 - Model - INFO - Best Instance Accuracy: 0.923301, Class Accuracy: 0.896029
+2021-03-27 01:02:43,991 - Model - INFO - Save model...
+2021-03-27 01:02:43,991 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-27 01:02:44,073 - Model - INFO - Epoch 67 (67/200):
+2021-03-27 01:05:52,656 - Model - INFO - Train Instance Accuracy: 0.957927
+2021-03-27 01:06:28,271 - Model - INFO - Test Instance Accuracy: 0.919013, Class Accuracy: 0.888684
+2021-03-27 01:06:28,271 - Model - INFO - Best Instance Accuracy: 0.923301, Class Accuracy: 0.896029
+2021-03-27 01:06:28,271 - Model - INFO - Epoch 68 (68/200):
+2021-03-27 01:09:37,150 - Model - INFO - Train Instance Accuracy: 0.960366
+2021-03-27 01:10:13,618 - Model - INFO - Test Instance Accuracy: 0.918932, Class Accuracy: 0.886872
+2021-03-27 01:10:13,619 - Model - INFO - Best Instance Accuracy: 0.923301, Class Accuracy: 0.896029
+2021-03-27 01:10:13,619 - Model - INFO - Epoch 69 (69/200):
+2021-03-27 01:13:21,898 - Model - INFO - Train Instance Accuracy: 0.954370
+2021-03-27 01:13:57,930 - Model - INFO - Test Instance Accuracy: 0.917476, Class Accuracy: 0.893602
+2021-03-27 01:13:57,931 - Model - INFO - Best Instance Accuracy: 0.923301, Class Accuracy: 0.896029
+2021-03-27 01:13:57,931 - Model - INFO - Epoch 70 (70/200):
+2021-03-27 01:17:05,626 - Model - INFO - Train Instance Accuracy: 0.956809
+2021-03-27 01:17:42,019 - Model - INFO - Test Instance Accuracy: 0.910194, Class Accuracy: 0.892584
+2021-03-27 01:17:42,020 - Model - INFO - Best Instance Accuracy: 0.923301, Class Accuracy: 0.896029
+2021-03-27 01:17:42,020 - Model - INFO - Epoch 71 (71/200):
+2021-03-27 01:20:52,901 - Model - INFO - Train Instance Accuracy: 0.953557
+2021-03-27 01:21:30,108 - Model - INFO - Test Instance Accuracy: 0.923706, Class Accuracy: 0.894070
+2021-03-27 01:21:30,109 - Model - INFO - Best Instance Accuracy: 0.923706, Class Accuracy: 0.896029
+2021-03-27 01:21:30,109 - Model - INFO - Save model...
+2021-03-27 01:21:30,109 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-27 01:21:30,201 - Model - INFO - Epoch 72 (72/200):
+2021-03-27 01:24:41,230 - Model - INFO - Train Instance Accuracy: 0.955488
+2021-03-27 01:25:17,694 - Model - INFO - Test Instance Accuracy: 0.910922, Class Accuracy: 0.888698
+2021-03-27 01:25:17,694 - Model - INFO - Best Instance Accuracy: 0.923706, Class Accuracy: 0.896029
+2021-03-27 01:25:17,694 - Model - INFO - Epoch 73 (73/200):
+2021-03-27 01:28:26,836 - Model - INFO - Train Instance Accuracy: 0.959756
+2021-03-27 01:29:04,016 - Model - INFO - Test Instance Accuracy: 0.908900, Class Accuracy: 0.888111
+2021-03-27 01:29:04,016 - Model - INFO - Best Instance Accuracy: 0.923706, Class Accuracy: 0.896029
+2021-03-27 01:29:04,016 - Model - INFO - Epoch 74 (74/200):
+2021-03-27 01:32:13,503 - Model - INFO - Train Instance Accuracy: 0.959350
+2021-03-27 01:32:49,011 - Model - INFO - Test Instance Accuracy: 0.918366, Class Accuracy: 0.889735
+2021-03-27 01:32:49,012 - Model - INFO - Best Instance Accuracy: 0.923706, Class Accuracy: 0.896029
+2021-03-27 01:32:49,012 - Model - INFO - Epoch 75 (75/200):
+2021-03-27 01:35:55,929 - Model - INFO - Train Instance Accuracy: 0.958435
+2021-03-27 01:36:32,292 - Model - INFO - Test Instance Accuracy: 0.921117, Class Accuracy: 0.890446
+2021-03-27 01:36:32,293 - Model - INFO - Best Instance Accuracy: 0.923706, Class Accuracy: 0.896029
+2021-03-27 01:36:32,293 - Model - INFO - Epoch 76 (76/200):
+2021-03-27 01:39:42,959 - Model - INFO - Train Instance Accuracy: 0.958028
+2021-03-27 01:40:19,590 - Model - INFO - Test Instance Accuracy: 0.914401, Class Accuracy: 0.887854
+2021-03-27 01:40:19,590 - Model - INFO - Best Instance Accuracy: 0.923706, Class Accuracy: 0.896029
+2021-03-27 01:40:19,590 - Model - INFO - Epoch 77 (77/200):
+2021-03-27 01:43:30,646 - Model - INFO - Train Instance Accuracy: 0.957520
+2021-03-27 01:44:07,401 - Model - INFO - Test Instance Accuracy: 0.916262, Class Accuracy: 0.884461
+2021-03-27 01:44:07,401 - Model - INFO - Best Instance Accuracy: 0.923706, Class Accuracy: 0.896029
+2021-03-27 01:44:07,401 - Model - INFO - Epoch 78 (78/200):
+2021-03-27 01:47:15,394 - Model - INFO - Train Instance Accuracy: 0.959654
+2021-03-27 01:47:51,949 - Model - INFO - Test Instance Accuracy: 0.920874, Class Accuracy: 0.903344
+2021-03-27 01:47:51,949 - Model - INFO - Best Instance Accuracy: 0.923706, Class Accuracy: 0.903344
+2021-03-27 01:47:51,949 - Model - INFO - Epoch 79 (79/200):
+2021-03-27 01:51:00,546 - Model - INFO - Train Instance Accuracy: 0.960061
+2021-03-27 01:51:36,234 - Model - INFO - Test Instance Accuracy: 0.914968, Class Accuracy: 0.880035
+2021-03-27 01:51:36,235 - Model - INFO - Best Instance Accuracy: 0.923706, Class Accuracy: 0.903344
+2021-03-27 01:51:36,235 - Model - INFO - Epoch 80 (80/200):
+2021-03-27 01:54:42,984 - Model - INFO - Train Instance Accuracy: 0.966870
+2021-03-27 01:55:18,683 - Model - INFO - Test Instance Accuracy: 0.923867, Class Accuracy: 0.898023
+2021-03-27 01:55:18,684 - Model - INFO - Best Instance Accuracy: 0.923867, Class Accuracy: 0.903344
+2021-03-27 01:55:18,684 - Model - INFO - Save model...
+2021-03-27 01:55:18,684 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-27 01:55:18,770 - Model - INFO - Epoch 81 (81/200):
+2021-03-27 01:58:28,301 - Model - INFO - Train Instance Accuracy: 0.962500
+2021-03-27 01:59:05,759 - Model - INFO - Test Instance Accuracy: 0.925728, Class Accuracy: 0.897799
+2021-03-27 01:59:05,759 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 01:59:05,759 - Model - INFO - Save model...
+2021-03-27 01:59:05,760 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-27 01:59:05,859 - Model - INFO - Epoch 82 (82/200):
+2021-03-27 02:02:16,507 - Model - INFO - Train Instance Accuracy: 0.969207
+2021-03-27 02:02:53,850 - Model - INFO - Test Instance Accuracy: 0.922816, Class Accuracy: 0.897147
+2021-03-27 02:02:53,850 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 02:02:53,850 - Model - INFO - Epoch 83 (83/200):
+2021-03-27 02:06:01,751 - Model - INFO - Train Instance Accuracy: 0.965549
+2021-03-27 02:06:37,873 - Model - INFO - Test Instance Accuracy: 0.921117, Class Accuracy: 0.900510
+2021-03-27 02:06:37,873 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 02:06:37,873 - Model - INFO - Epoch 84 (84/200):
+2021-03-27 02:09:46,546 - Model - INFO - Train Instance Accuracy: 0.965142
+2021-03-27 02:10:22,801 - Model - INFO - Test Instance Accuracy: 0.923706, Class Accuracy: 0.895119
+2021-03-27 02:10:22,802 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 02:10:22,802 - Model - INFO - Epoch 85 (85/200):
+2021-03-27 02:13:30,045 - Model - INFO - Train Instance Accuracy: 0.968191
+2021-03-27 02:14:05,712 - Model - INFO - Test Instance Accuracy: 0.919984, Class Accuracy: 0.893379
+2021-03-27 02:14:05,712 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 02:14:05,712 - Model - INFO - Epoch 86 (86/200):
+2021-03-27 02:17:15,976 - Model - INFO - Train Instance Accuracy: 0.969309
+2021-03-27 02:17:52,656 - Model - INFO - Test Instance Accuracy: 0.923625, Class Accuracy: 0.889313
+2021-03-27 02:17:52,656 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 02:17:52,656 - Model - INFO - Epoch 87 (87/200):
+2021-03-27 02:21:04,692 - Model - INFO - Train Instance Accuracy: 0.966362
+2021-03-27 02:21:40,751 - Model - INFO - Test Instance Accuracy: 0.925647, Class Accuracy: 0.899212
+2021-03-27 02:21:40,751 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 02:21:40,751 - Model - INFO - Epoch 88 (88/200):
+2021-03-27 02:24:50,762 - Model - INFO - Train Instance Accuracy: 0.967886
+2021-03-27 02:25:26,866 - Model - INFO - Test Instance Accuracy: 0.922087, Class Accuracy: 0.898278
+2021-03-27 02:25:26,867 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 02:25:26,867 - Model - INFO - Epoch 89 (89/200):
+2021-03-27 02:28:36,195 - Model - INFO - Train Instance Accuracy: 0.967480
+2021-03-27 02:29:12,864 - Model - INFO - Test Instance Accuracy: 0.922411, Class Accuracy: 0.893827
+2021-03-27 02:29:12,864 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 02:29:12,864 - Model - INFO - Epoch 90 (90/200):
+2021-03-27 02:32:19,517 - Model - INFO - Train Instance Accuracy: 0.968801
+2021-03-27 02:32:55,366 - Model - INFO - Test Instance Accuracy: 0.913754, Class Accuracy: 0.890605
+2021-03-27 02:32:55,366 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 02:32:55,367 - Model - INFO - Epoch 91 (91/200):
+2021-03-27 02:36:04,428 - Model - INFO - Train Instance Accuracy: 0.971037
+2021-03-27 02:36:40,684 - Model - INFO - Test Instance Accuracy: 0.924676, Class Accuracy: 0.899567
+2021-03-27 02:36:40,684 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 02:36:40,684 - Model - INFO - Epoch 92 (92/200):
+2021-03-27 02:39:52,347 - Model - INFO - Train Instance Accuracy: 0.968699
+2021-03-27 02:40:28,648 - Model - INFO - Test Instance Accuracy: 0.922896, Class Accuracy: 0.890931
+2021-03-27 02:40:28,649 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 02:40:28,649 - Model - INFO - Epoch 93 (93/200):
+2021-03-27 02:43:36,711 - Model - INFO - Train Instance Accuracy: 0.968598
+2021-03-27 02:44:12,619 - Model - INFO - Test Instance Accuracy: 0.912217, Class Accuracy: 0.882876
+2021-03-27 02:44:12,619 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 02:44:12,620 - Model - INFO - Epoch 94 (94/200):
+2021-03-27 02:47:21,189 - Model - INFO - Train Instance Accuracy: 0.967480
+2021-03-27 02:47:57,843 - Model - INFO - Test Instance Accuracy: 0.921278, Class Accuracy: 0.897462
+2021-03-27 02:47:57,844 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 02:47:57,844 - Model - INFO - Epoch 95 (95/200):
+2021-03-27 02:51:05,890 - Model - INFO - Train Instance Accuracy: 0.969106
+2021-03-27 02:51:41,849 - Model - INFO - Test Instance Accuracy: 0.913188, Class Accuracy: 0.890090
+2021-03-27 02:51:41,849 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 02:51:41,849 - Model - INFO - Epoch 96 (96/200):
+2021-03-27 02:54:49,559 - Model - INFO - Train Instance Accuracy: 0.969512
+2021-03-27 02:55:26,235 - Model - INFO - Test Instance Accuracy: 0.915534, Class Accuracy: 0.891338
+2021-03-27 02:55:26,236 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 02:55:26,236 - Model - INFO - Epoch 97 (97/200):
+2021-03-27 02:58:36,732 - Model - INFO - Train Instance Accuracy: 0.971748
+2021-03-27 02:59:13,328 - Model - INFO - Test Instance Accuracy: 0.922816, Class Accuracy: 0.892314
+2021-03-27 02:59:13,328 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 02:59:13,329 - Model - INFO - Epoch 98 (98/200):
+2021-03-27 03:02:22,989 - Model - INFO - Train Instance Accuracy: 0.970325
+2021-03-27 03:02:59,593 - Model - INFO - Test Instance Accuracy: 0.921764, Class Accuracy: 0.883498
+2021-03-27 03:02:59,594 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 03:02:59,594 - Model - INFO - Epoch 99 (99/200):
+2021-03-27 03:06:07,092 - Model - INFO - Train Instance Accuracy: 0.971240
+2021-03-27 03:06:42,899 - Model - INFO - Test Instance Accuracy: 0.915534, Class Accuracy: 0.882447
+2021-03-27 03:06:42,900 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 03:06:42,900 - Model - INFO - Epoch 100 (100/200):
+2021-03-27 03:09:51,238 - Model - INFO - Train Instance Accuracy: 0.974390
+2021-03-27 03:10:26,396 - Model - INFO - Test Instance Accuracy: 0.917880, Class Accuracy: 0.890529
+2021-03-27 03:10:26,396 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 03:10:26,396 - Model - INFO - Epoch 101 (101/200):
+2021-03-27 03:13:33,380 - Model - INFO - Train Instance Accuracy: 0.975610
+2021-03-27 03:14:09,328 - Model - INFO - Test Instance Accuracy: 0.921197, Class Accuracy: 0.891717
+2021-03-27 03:14:09,328 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 03:14:09,328 - Model - INFO - Epoch 102 (102/200):
+2021-03-27 03:17:19,515 - Model - INFO - Train Instance Accuracy: 0.976524
+2021-03-27 03:17:56,425 - Model - INFO - Test Instance Accuracy: 0.923625, Class Accuracy: 0.894886
+2021-03-27 03:17:56,425 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 03:17:56,425 - Model - INFO - Epoch 103 (103/200):
+2021-03-27 03:21:05,726 - Model - INFO - Train Instance Accuracy: 0.977439
+2021-03-27 03:21:41,776 - Model - INFO - Test Instance Accuracy: 0.919822, Class Accuracy: 0.887097
+2021-03-27 03:21:41,776 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 03:21:41,776 - Model - INFO - Epoch 104 (104/200):
+2021-03-27 03:24:50,318 - Model - INFO - Train Instance Accuracy: 0.978557
+2021-03-27 03:25:26,649 - Model - INFO - Test Instance Accuracy: 0.920793, Class Accuracy: 0.886681
+2021-03-27 03:25:26,650 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 03:25:26,650 - Model - INFO - Epoch 105 (105/200):
+2021-03-27 03:28:35,697 - Model - INFO - Train Instance Accuracy: 0.974492
+2021-03-27 03:29:11,579 - Model - INFO - Test Instance Accuracy: 0.918528, Class Accuracy: 0.894964
+2021-03-27 03:29:11,579 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 03:29:11,579 - Model - INFO - Epoch 106 (106/200):
+2021-03-27 03:32:18,532 - Model - INFO - Train Instance Accuracy: 0.977744
+2021-03-27 03:32:54,875 - Model - INFO - Test Instance Accuracy: 0.919984, Class Accuracy: 0.892345
+2021-03-27 03:32:54,876 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 03:32:54,876 - Model - INFO - Epoch 107 (107/200):
+2021-03-27 03:36:04,438 - Model - INFO - Train Instance Accuracy: 0.975508
+2021-03-27 03:36:40,897 - Model - INFO - Test Instance Accuracy: 0.919498, Class Accuracy: 0.896416
+2021-03-27 03:36:40,898 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 03:36:40,898 - Model - INFO - Epoch 108 (108/200):
+2021-03-27 03:39:51,338 - Model - INFO - Train Instance Accuracy: 0.976524
+2021-03-27 03:40:28,796 - Model - INFO - Test Instance Accuracy: 0.925647, Class Accuracy: 0.901199
+2021-03-27 03:40:28,796 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 03:40:28,797 - Model - INFO - Epoch 109 (109/200):
+2021-03-27 03:43:36,450 - Model - INFO - Train Instance Accuracy: 0.979167
+2021-03-27 03:44:12,955 - Model - INFO - Test Instance Accuracy: 0.922896, Class Accuracy: 0.898842
+2021-03-27 03:44:12,955 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 03:44:12,955 - Model - INFO - Epoch 110 (110/200):
+2021-03-27 03:47:22,092 - Model - INFO - Train Instance Accuracy: 0.978760
+2021-03-27 03:47:58,994 - Model - INFO - Test Instance Accuracy: 0.924110, Class Accuracy: 0.897842
+2021-03-27 03:47:58,994 - Model - INFO - Best Instance Accuracy: 0.925728, Class Accuracy: 0.903344
+2021-03-27 03:47:58,995 - Model - INFO - Epoch 111 (111/200):
+2021-03-27 03:51:04,997 - Model - INFO - Train Instance Accuracy: 0.976829
+2021-03-27 03:51:40,628 - Model - INFO - Test Instance Accuracy: 0.926618, Class Accuracy: 0.897422
+2021-03-27 03:51:40,629 - Model - INFO - Best Instance Accuracy: 0.926618, Class Accuracy: 0.903344
+2021-03-27 03:51:40,629 - Model - INFO - Save model...
+2021-03-27 03:51:40,629 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-27 03:51:40,712 - Model - INFO - Epoch 112 (112/200):
+2021-03-27 03:54:50,392 - Model - INFO - Train Instance Accuracy: 0.977134
+2021-03-27 03:55:26,991 - Model - INFO - Test Instance Accuracy: 0.919984, Class Accuracy: 0.892330
+2021-03-27 03:55:26,991 - Model - INFO - Best Instance Accuracy: 0.926618, Class Accuracy: 0.903344
+2021-03-27 03:55:26,992 - Model - INFO - Epoch 113 (113/200):
+2021-03-27 03:58:37,117 - Model - INFO - Train Instance Accuracy: 0.977033
+2021-03-27 03:59:14,662 - Model - INFO - Test Instance Accuracy: 0.919660, Class Accuracy: 0.891562
+2021-03-27 03:59:14,662 - Model - INFO - Best Instance Accuracy: 0.926618, Class Accuracy: 0.903344
+2021-03-27 03:59:14,662 - Model - INFO - Epoch 114 (114/200):
+2021-03-27 04:02:23,373 - Model - INFO - Train Instance Accuracy: 0.978150
+2021-03-27 04:03:00,028 - Model - INFO - Test Instance Accuracy: 0.920955, Class Accuracy: 0.889973
+2021-03-27 04:03:00,028 - Model - INFO - Best Instance Accuracy: 0.926618, Class Accuracy: 0.903344
+2021-03-27 04:03:00,028 - Model - INFO - Epoch 115 (115/200):
+2021-03-27 04:06:07,261 - Model - INFO - Train Instance Accuracy: 0.975610
+2021-03-27 04:06:43,231 - Model - INFO - Test Instance Accuracy: 0.918770, Class Accuracy: 0.900687
+2021-03-27 04:06:43,231 - Model - INFO - Best Instance Accuracy: 0.926618, Class Accuracy: 0.903344
+2021-03-27 04:06:43,231 - Model - INFO - Epoch 116 (116/200):
+2021-03-27 04:09:49,830 - Model - INFO - Train Instance Accuracy: 0.978455
+2021-03-27 04:10:25,223 - Model - INFO - Test Instance Accuracy: 0.922411, Class Accuracy: 0.896952
+2021-03-27 04:10:25,223 - Model - INFO - Best Instance Accuracy: 0.926618, Class Accuracy: 0.903344
+2021-03-27 04:10:25,223 - Model - INFO - Epoch 117 (117/200):
+2021-03-27 04:13:31,753 - Model - INFO - Train Instance Accuracy: 0.978557
+2021-03-27 04:14:07,656 - Model - INFO - Test Instance Accuracy: 0.921197, Class Accuracy: 0.894379
+2021-03-27 04:14:07,656 - Model - INFO - Best Instance Accuracy: 0.926618, Class Accuracy: 0.903344
+2021-03-27 04:14:07,657 - Model - INFO - Epoch 118 (118/200):
+2021-03-27 04:17:19,162 - Model - INFO - Train Instance Accuracy: 0.980183
+2021-03-27 04:17:55,617 - Model - INFO - Test Instance Accuracy: 0.918528, Class Accuracy: 0.893803
+2021-03-27 04:17:55,617 - Model - INFO - Best Instance Accuracy: 0.926618, Class Accuracy: 0.903344
+2021-03-27 04:17:55,617 - Model - INFO - Epoch 119 (119/200):
+2021-03-27 04:21:05,487 - Model - INFO - Train Instance Accuracy: 0.979065
+2021-03-27 04:21:41,348 - Model - INFO - Test Instance Accuracy: 0.920874, Class Accuracy: 0.900908
+2021-03-27 04:21:41,348 - Model - INFO - Best Instance Accuracy: 0.926618, Class Accuracy: 0.903344
+2021-03-27 04:21:41,349 - Model - INFO - Epoch 120 (120/200):
+2021-03-27 04:24:49,485 - Model - INFO - Train Instance Accuracy: 0.981911
+2021-03-27 04:25:25,242 - Model - INFO - Test Instance Accuracy: 0.924434, Class Accuracy: 0.893140
+2021-03-27 04:25:25,242 - Model - INFO - Best Instance Accuracy: 0.926618, Class Accuracy: 0.903344
+2021-03-27 04:25:25,242 - Model - INFO - Epoch 121 (121/200):
+2021-03-27 04:28:33,204 - Model - INFO - Train Instance Accuracy: 0.978862
+2021-03-27 04:29:09,218 - Model - INFO - Test Instance Accuracy: 0.919175, Class Accuracy: 0.890974
+2021-03-27 04:29:09,218 - Model - INFO - Best Instance Accuracy: 0.926618, Class Accuracy: 0.903344
+2021-03-27 04:29:09,218 - Model - INFO - Epoch 122 (122/200):
+2021-03-27 04:32:15,202 - Model - INFO - Train Instance Accuracy: 0.981606
+2021-03-27 04:32:51,678 - Model - INFO - Test Instance Accuracy: 0.919984, Class Accuracy: 0.899963
+2021-03-27 04:32:51,678 - Model - INFO - Best Instance Accuracy: 0.926618, Class Accuracy: 0.903344
+2021-03-27 04:32:51,678 - Model - INFO - Epoch 123 (123/200):
+2021-03-27 04:36:00,015 - Model - INFO - Train Instance Accuracy: 0.983638
+2021-03-27 04:36:36,704 - Model - INFO - Test Instance Accuracy: 0.921521, Class Accuracy: 0.895623
+2021-03-27 04:36:36,704 - Model - INFO - Best Instance Accuracy: 0.926618, Class Accuracy: 0.903344
+2021-03-27 04:36:36,705 - Model - INFO - Epoch 124 (124/200):
+2021-03-27 04:39:47,808 - Model - INFO - Train Instance Accuracy: 0.983537
+2021-03-27 04:40:24,080 - Model - INFO - Test Instance Accuracy: 0.919579, Class Accuracy: 0.895894
+2021-03-27 04:40:24,080 - Model - INFO - Best Instance Accuracy: 0.926618, Class Accuracy: 0.903344
+2021-03-27 04:40:24,080 - Model - INFO - Epoch 125 (125/200):
+2021-03-27 04:43:31,547 - Model - INFO - Train Instance Accuracy: 0.983130
+2021-03-27 04:44:08,014 - Model - INFO - Test Instance Accuracy: 0.922411, Class Accuracy: 0.904956
+2021-03-27 04:44:08,014 - Model - INFO - Best Instance Accuracy: 0.926618, Class Accuracy: 0.904956
+2021-03-27 04:44:08,014 - Model - INFO - Epoch 126 (126/200):
+2021-03-27 04:47:16,759 - Model - INFO - Train Instance Accuracy: 0.984858
+2021-03-27 04:47:53,079 - Model - INFO - Test Instance Accuracy: 0.924838, Class Accuracy: 0.902480
+2021-03-27 04:47:53,079 - Model - INFO - Best Instance Accuracy: 0.926618, Class Accuracy: 0.904956
+2021-03-27 04:47:53,079 - Model - INFO - Epoch 127 (127/200):
+2021-03-27 04:51:00,578 - Model - INFO - Train Instance Accuracy: 0.982012
+2021-03-27 04:51:35,887 - Model - INFO - Test Instance Accuracy: 0.919984, Class Accuracy: 0.894272
+2021-03-27 04:51:35,887 - Model - INFO - Best Instance Accuracy: 0.926618, Class Accuracy: 0.904956
+2021-03-27 04:51:35,888 - Model - INFO - Epoch 128 (128/200):
+2021-03-27 04:54:42,644 - Model - INFO - Train Instance Accuracy: 0.981301
+2021-03-27 04:55:19,430 - Model - INFO - Test Instance Accuracy: 0.921197, Class Accuracy: 0.893158
+2021-03-27 04:55:19,431 - Model - INFO - Best Instance Accuracy: 0.926618, Class Accuracy: 0.904956
+2021-03-27 04:55:19,431 - Model - INFO - Epoch 129 (129/200):
+2021-03-27 04:58:31,021 - Model - INFO - Train Instance Accuracy: 0.981911
+2021-03-27 04:59:07,964 - Model - INFO - Test Instance Accuracy: 0.928964, Class Accuracy: 0.904224
+2021-03-27 04:59:07,964 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.904956
+2021-03-27 04:59:07,964 - Model - INFO - Save model...
+2021-03-27 04:59:07,964 - Model - INFO - Saving at log/classification/pointnet2_msg_normals/checkpoints/best_model.pth
+2021-03-27 04:59:08,045 - Model - INFO - Epoch 130 (130/200):
+2021-03-27 05:02:17,132 - Model - INFO - Train Instance Accuracy: 0.982317
+2021-03-27 05:02:53,682 - Model - INFO - Test Instance Accuracy: 0.921602, Class Accuracy: 0.894738
+2021-03-27 05:02:53,683 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.904956
+2021-03-27 05:02:53,683 - Model - INFO - Epoch 131 (131/200):
+2021-03-27 05:06:00,389 - Model - INFO - Train Instance Accuracy: 0.984858
+2021-03-27 05:06:36,543 - Model - INFO - Test Instance Accuracy: 0.924838, Class Accuracy: 0.900552
+2021-03-27 05:06:36,543 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.904956
+2021-03-27 05:06:36,543 - Model - INFO - Epoch 132 (132/200):
+2021-03-27 05:09:44,674 - Model - INFO - Train Instance Accuracy: 0.984553
+2021-03-27 05:10:20,043 - Model - INFO - Test Instance Accuracy: 0.919660, Class Accuracy: 0.893706
+2021-03-27 05:10:20,044 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.904956
+2021-03-27 05:10:20,044 - Model - INFO - Epoch 133 (133/200):
+2021-03-27 05:13:26,033 - Model - INFO - Train Instance Accuracy: 0.984553
+2021-03-27 05:14:02,158 - Model - INFO - Test Instance Accuracy: 0.914320, Class Accuracy: 0.896759
+2021-03-27 05:14:02,158 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.904956
+2021-03-27 05:14:02,159 - Model - INFO - Epoch 134 (134/200):
+2021-03-27 05:17:12,693 - Model - INFO - Train Instance Accuracy: 0.982520
+2021-03-27 05:17:49,300 - Model - INFO - Test Instance Accuracy: 0.921602, Class Accuracy: 0.893418
+2021-03-27 05:17:49,300 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.904956
+2021-03-27 05:17:49,301 - Model - INFO - Epoch 135 (135/200):
+2021-03-27 05:20:58,975 - Model - INFO - Train Instance Accuracy: 0.982927
+2021-03-27 05:21:35,377 - Model - INFO - Test Instance Accuracy: 0.924919, Class Accuracy: 0.906640
+2021-03-27 05:21:35,378 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.906640
+2021-03-27 05:21:35,378 - Model - INFO - Epoch 136 (136/200):
+2021-03-27 05:24:43,750 - Model - INFO - Train Instance Accuracy: 0.984654
+2021-03-27 05:25:19,951 - Model - INFO - Test Instance Accuracy: 0.923867, Class Accuracy: 0.898661
+2021-03-27 05:25:19,952 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.906640
+2021-03-27 05:25:19,952 - Model - INFO - Epoch 137 (137/200):
+2021-03-27 05:28:29,468 - Model - INFO - Train Instance Accuracy: 0.984146
+2021-03-27 05:29:05,290 - Model - INFO - Test Instance Accuracy: 0.922006, Class Accuracy: 0.892466
+2021-03-27 05:29:05,291 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.906640
+2021-03-27 05:29:05,291 - Model - INFO - Epoch 138 (138/200):
+2021-03-27 05:32:12,082 - Model - INFO - Train Instance Accuracy: 0.985163
+2021-03-27 05:32:49,301 - Model - INFO - Test Instance Accuracy: 0.917557, Class Accuracy: 0.888427
+2021-03-27 05:32:49,301 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.906640
+2021-03-27 05:32:49,301 - Model - INFO - Epoch 139 (139/200):
+2021-03-27 05:36:01,427 - Model - INFO - Train Instance Accuracy: 0.981606
+2021-03-27 05:36:37,982 - Model - INFO - Test Instance Accuracy: 0.915049, Class Accuracy: 0.890795
+2021-03-27 05:36:37,982 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.906640
+2021-03-27 05:36:37,982 - Model - INFO - Epoch 140 (140/200):
+2021-03-27 05:39:49,636 - Model - INFO - Train Instance Accuracy: 0.984146
+2021-03-27 05:40:25,953 - Model - INFO - Test Instance Accuracy: 0.924595, Class Accuracy: 0.907417
+2021-03-27 05:40:25,953 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 05:40:25,953 - Model - INFO - Epoch 141 (141/200):
+2021-03-27 05:43:35,456 - Model - INFO - Train Instance Accuracy: 0.985772
+2021-03-27 05:44:12,035 - Model - INFO - Test Instance Accuracy: 0.924029, Class Accuracy: 0.903490
+2021-03-27 05:44:12,036 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 05:44:12,036 - Model - INFO - Epoch 142 (142/200):
+2021-03-27 05:47:19,825 - Model - INFO - Train Instance Accuracy: 0.985366
+2021-03-27 05:47:55,771 - Model - INFO - Test Instance Accuracy: 0.920146, Class Accuracy: 0.894984
+2021-03-27 05:47:55,778 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 05:47:55,778 - Model - INFO - Epoch 143 (143/200):
+2021-03-27 05:51:02,774 - Model - INFO - Train Instance Accuracy: 0.987297
+2021-03-27 05:51:38,992 - Model - INFO - Test Instance Accuracy: 0.924595, Class Accuracy: 0.902766
+2021-03-27 05:51:38,992 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 05:51:38,992 - Model - INFO - Epoch 144 (144/200):
+2021-03-27 05:54:46,875 - Model - INFO - Train Instance Accuracy: 0.985569
+2021-03-27 05:55:23,189 - Model - INFO - Test Instance Accuracy: 0.922896, Class Accuracy: 0.893116
+2021-03-27 05:55:23,189 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 05:55:23,190 - Model - INFO - Epoch 145 (145/200):
+2021-03-27 05:58:33,423 - Model - INFO - Train Instance Accuracy: 0.986382
+2021-03-27 05:59:10,363 - Model - INFO - Test Instance Accuracy: 0.923301, Class Accuracy: 0.902307
+2021-03-27 05:59:10,363 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 05:59:10,363 - Model - INFO - Epoch 146 (146/200):
+2021-03-27 06:02:19,897 - Model - INFO - Train Instance Accuracy: 0.987093
+2021-03-27 06:02:55,879 - Model - INFO - Test Instance Accuracy: 0.922330, Class Accuracy: 0.903168
+2021-03-27 06:02:55,879 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 06:02:55,879 - Model - INFO - Epoch 147 (147/200):
+2021-03-27 06:06:04,517 - Model - INFO - Train Instance Accuracy: 0.986890
+2021-03-27 06:06:40,757 - Model - INFO - Test Instance Accuracy: 0.917152, Class Accuracy: 0.896476
+2021-03-27 06:06:40,758 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 06:06:40,758 - Model - INFO - Epoch 148 (148/200):
+2021-03-27 06:09:48,928 - Model - INFO - Train Instance Accuracy: 0.987703
+2021-03-27 06:10:24,522 - Model - INFO - Test Instance Accuracy: 0.924110, Class Accuracy: 0.895754
+2021-03-27 06:10:24,523 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 06:10:24,523 - Model - INFO - Epoch 149 (149/200):
+2021-03-27 06:13:32,671 - Model - INFO - Train Instance Accuracy: 0.985061
+2021-03-27 06:14:08,833 - Model - INFO - Test Instance Accuracy: 0.920065, Class Accuracy: 0.892883
+2021-03-27 06:14:08,833 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 06:14:08,834 - Model - INFO - Epoch 150 (150/200):
+2021-03-27 06:17:18,325 - Model - INFO - Train Instance Accuracy: 0.987093
+2021-03-27 06:17:56,414 - Model - INFO - Test Instance Accuracy: 0.926052, Class Accuracy: 0.901977
+2021-03-27 06:17:56,414 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 06:17:56,419 - Model - INFO - Epoch 151 (151/200):
+2021-03-27 06:21:05,700 - Model - INFO - Train Instance Accuracy: 0.986382
+2021-03-27 06:21:41,649 - Model - INFO - Test Instance Accuracy: 0.926861, Class Accuracy: 0.902619
+2021-03-27 06:21:41,649 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 06:21:41,649 - Model - INFO - Epoch 152 (152/200):
+2021-03-27 06:24:49,056 - Model - INFO - Train Instance Accuracy: 0.985874
+2021-03-27 06:25:25,217 - Model - INFO - Test Instance Accuracy: 0.925000, Class Accuracy: 0.902453
+2021-03-27 06:25:25,217 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 06:25:25,217 - Model - INFO - Epoch 153 (153/200):
+2021-03-27 06:28:33,675 - Model - INFO - Train Instance Accuracy: 0.987297
+2021-03-27 06:29:10,264 - Model - INFO - Test Instance Accuracy: 0.925405, Class Accuracy: 0.902045
+2021-03-27 06:29:10,264 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 06:29:10,264 - Model - INFO - Epoch 154 (154/200):
+2021-03-27 06:32:16,084 - Model - INFO - Train Instance Accuracy: 0.985671
+2021-03-27 06:32:52,097 - Model - INFO - Test Instance Accuracy: 0.919984, Class Accuracy: 0.892647
+2021-03-27 06:32:52,097 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 06:32:52,097 - Model - INFO - Epoch 155 (155/200):
+2021-03-27 06:36:02,575 - Model - INFO - Train Instance Accuracy: 0.985671
+2021-03-27 06:36:39,232 - Model - INFO - Test Instance Accuracy: 0.919984, Class Accuracy: 0.895237
+2021-03-27 06:36:39,232 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 06:36:39,232 - Model - INFO - Epoch 156 (156/200):
+2021-03-27 06:39:49,872 - Model - INFO - Train Instance Accuracy: 0.984959
+2021-03-27 06:40:26,250 - Model - INFO - Test Instance Accuracy: 0.916748, Class Accuracy: 0.892456
+2021-03-27 06:40:26,250 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 06:40:26,250 - Model - INFO - Epoch 157 (157/200):
+2021-03-27 06:43:33,930 - Model - INFO - Train Instance Accuracy: 0.987602
+2021-03-27 06:44:10,238 - Model - INFO - Test Instance Accuracy: 0.923625, Class Accuracy: 0.898351
+2021-03-27 06:44:10,238 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 06:44:10,239 - Model - INFO - Epoch 158 (158/200):
+2021-03-27 06:47:18,943 - Model - INFO - Train Instance Accuracy: 0.987805
+2021-03-27 06:47:54,815 - Model - INFO - Test Instance Accuracy: 0.919660, Class Accuracy: 0.899956
+2021-03-27 06:47:54,815 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 06:47:54,816 - Model - INFO - Epoch 159 (159/200):
+2021-03-27 06:51:01,788 - Model - INFO - Train Instance Accuracy: 0.986484
+2021-03-27 06:51:37,427 - Model - INFO - Test Instance Accuracy: 0.918770, Class Accuracy: 0.903619
+2021-03-27 06:51:37,427 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 06:51:37,427 - Model - INFO - Epoch 160 (160/200):
+2021-03-27 06:54:45,045 - Model - INFO - Train Instance Accuracy: 0.988008
+2021-03-27 06:55:21,430 - Model - INFO - Test Instance Accuracy: 0.918366, Class Accuracy: 0.893453
+2021-03-27 06:55:21,431 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 06:55:21,431 - Model - INFO - Epoch 161 (161/200):
+2021-03-27 06:58:32,656 - Model - INFO - Train Instance Accuracy: 0.988313
+2021-03-27 06:59:09,956 - Model - INFO - Test Instance Accuracy: 0.922573, Class Accuracy: 0.903071
+2021-03-27 06:59:09,956 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 06:59:09,956 - Model - INFO - Epoch 162 (162/200):
+2021-03-27 07:02:18,815 - Model - INFO - Train Instance Accuracy: 0.988618
+2021-03-27 07:02:55,540 - Model - INFO - Test Instance Accuracy: 0.920065, Class Accuracy: 0.900484
+2021-03-27 07:02:55,540 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 07:02:55,540 - Model - INFO - Epoch 163 (163/200):
+2021-03-27 07:06:04,851 - Model - INFO - Train Instance Accuracy: 0.989228
+2021-03-27 07:06:40,951 - Model - INFO - Test Instance Accuracy: 0.920388, Class Accuracy: 0.899304
+2021-03-27 07:06:40,951 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 07:06:40,951 - Model - INFO - Epoch 164 (164/200):
+2021-03-27 07:09:49,934 - Model - INFO - Train Instance Accuracy: 0.991362
+2021-03-27 07:10:25,749 - Model - INFO - Test Instance Accuracy: 0.920388, Class Accuracy: 0.898391
+2021-03-27 07:10:25,749 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 07:10:25,750 - Model - INFO - Epoch 165 (165/200):
+2021-03-27 07:13:32,919 - Model - INFO - Train Instance Accuracy: 0.988821
+2021-03-27 07:14:08,596 - Model - INFO - Test Instance Accuracy: 0.920469, Class Accuracy: 0.895616
+2021-03-27 07:14:08,596 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 07:14:08,596 - Model - INFO - Epoch 166 (166/200):
+2021-03-27 07:17:19,306 - Model - INFO - Train Instance Accuracy: 0.990650
+2021-03-27 07:17:56,088 - Model - INFO - Test Instance Accuracy: 0.920793, Class Accuracy: 0.891144
+2021-03-27 07:17:56,088 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 07:17:56,089 - Model - INFO - Epoch 167 (167/200):
+2021-03-27 07:21:06,830 - Model - INFO - Train Instance Accuracy: 0.987398
+2021-03-27 07:21:42,422 - Model - INFO - Test Instance Accuracy: 0.923301, Class Accuracy: 0.899522
+2021-03-27 07:21:42,422 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 07:21:42,422 - Model - INFO - Epoch 168 (168/200):
+2021-03-27 07:24:50,787 - Model - INFO - Train Instance Accuracy: 0.989228
+2021-03-27 07:25:26,670 - Model - INFO - Test Instance Accuracy: 0.916909, Class Accuracy: 0.895253
+2021-03-27 07:25:26,671 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 07:25:26,671 - Model - INFO - Epoch 169 (169/200):
+2021-03-27 07:28:35,250 - Model - INFO - Train Instance Accuracy: 0.989126
+2021-03-27 07:29:11,376 - Model - INFO - Test Instance Accuracy: 0.922816, Class Accuracy: 0.895519
+2021-03-27 07:29:11,376 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 07:29:11,376 - Model - INFO - Epoch 170 (170/200):
+2021-03-27 07:32:18,373 - Model - INFO - Train Instance Accuracy: 0.989939
+2021-03-27 07:32:54,349 - Model - INFO - Test Instance Accuracy: 0.925324, Class Accuracy: 0.904009
+2021-03-27 07:32:54,349 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 07:32:54,349 - Model - INFO - Epoch 171 (171/200):
+2021-03-27 07:36:03,772 - Model - INFO - Train Instance Accuracy: 0.990447
+2021-03-27 07:36:40,348 - Model - INFO - Test Instance Accuracy: 0.921602, Class Accuracy: 0.896592
+2021-03-27 07:36:40,348 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 07:36:40,349 - Model - INFO - Epoch 172 (172/200):
+2021-03-27 07:39:51,023 - Model - INFO - Train Instance Accuracy: 0.988821
+2021-03-27 07:40:27,219 - Model - INFO - Test Instance Accuracy: 0.924434, Class Accuracy: 0.900599
+2021-03-27 07:40:27,219 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 07:40:27,219 - Model - INFO - Epoch 173 (173/200):
+2021-03-27 07:43:34,309 - Model - INFO - Train Instance Accuracy: 0.989736
+2021-03-27 07:44:10,504 - Model - INFO - Test Instance Accuracy: 0.925728, Class Accuracy: 0.903505
+2021-03-27 07:44:10,504 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 07:44:10,505 - Model - INFO - Epoch 174 (174/200):
+2021-03-27 07:47:18,678 - Model - INFO - Train Instance Accuracy: 0.989431
+2021-03-27 07:47:54,697 - Model - INFO - Test Instance Accuracy: 0.918042, Class Accuracy: 0.894468
+2021-03-27 07:47:54,698 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 07:47:54,698 - Model - INFO - Epoch 175 (175/200):
+2021-03-27 07:51:02,136 - Model - INFO - Train Instance Accuracy: 0.990650
+2021-03-27 07:51:37,738 - Model - INFO - Test Instance Accuracy: 0.919579, Class Accuracy: 0.897540
+2021-03-27 07:51:37,739 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 07:51:37,739 - Model - INFO - Epoch 176 (176/200):
+2021-03-27 07:54:47,418 - Model - INFO - Train Instance Accuracy: 0.990854
+2021-03-27 07:55:25,494 - Model - INFO - Test Instance Accuracy: 0.921602, Class Accuracy: 0.898237
+2021-03-27 07:55:25,494 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 07:55:25,494 - Model - INFO - Epoch 177 (177/200):
+2021-03-27 07:58:37,360 - Model - INFO - Train Instance Accuracy: 0.990854
+2021-03-27 07:59:14,034 - Model - INFO - Test Instance Accuracy: 0.917152, Class Accuracy: 0.891755
+2021-03-27 07:59:14,035 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 07:59:14,035 - Model - INFO - Epoch 178 (178/200):
+2021-03-27 08:02:23,465 - Model - INFO - Train Instance Accuracy: 0.989533
+2021-03-27 08:02:59,148 - Model - INFO - Test Instance Accuracy: 0.918851, Class Accuracy: 0.896431
+2021-03-27 08:02:59,151 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 08:02:59,151 - Model - INFO - Epoch 179 (179/200):
+2021-03-27 08:06:08,336 - Model - INFO - Train Instance Accuracy: 0.988720
+2021-03-27 08:06:44,722 - Model - INFO - Test Instance Accuracy: 0.920793, Class Accuracy: 0.897950
+2021-03-27 08:06:44,723 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 08:06:44,723 - Model - INFO - Epoch 180 (180/200):
+2021-03-27 08:09:51,729 - Model - INFO - Train Instance Accuracy: 0.990549
+2021-03-27 08:10:27,210 - Model - INFO - Test Instance Accuracy: 0.920388, Class Accuracy: 0.893411
+2021-03-27 08:10:27,211 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 08:10:27,211 - Model - INFO - Epoch 181 (181/200):
+2021-03-27 08:13:35,668 - Model - INFO - Train Instance Accuracy: 0.991362
+2021-03-27 08:14:13,629 - Model - INFO - Test Instance Accuracy: 0.924919, Class Accuracy: 0.900658
+2021-03-27 08:14:13,629 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 08:14:13,630 - Model - INFO - Epoch 182 (182/200):
+2021-03-27 08:17:24,627 - Model - INFO - Train Instance Accuracy: 0.988211
+2021-03-27 08:18:00,984 - Model - INFO - Test Instance Accuracy: 0.921602, Class Accuracy: 0.899349
+2021-03-27 08:18:00,985 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 08:18:00,985 - Model - INFO - Epoch 183 (183/200):
+2021-03-27 08:21:09,894 - Model - INFO - Train Instance Accuracy: 0.990244
+2021-03-27 08:21:45,846 - Model - INFO - Test Instance Accuracy: 0.924515, Class Accuracy: 0.900286
+2021-03-27 08:21:45,847 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 08:21:45,847 - Model - INFO - Epoch 184 (184/200):
+2021-03-27 08:24:55,210 - Model - INFO - Train Instance Accuracy: 0.990549
+2021-03-27 08:25:31,479 - Model - INFO - Test Instance Accuracy: 0.921926, Class Accuracy: 0.895515
+2021-03-27 08:25:31,480 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 08:25:31,480 - Model - INFO - Epoch 185 (185/200):
+2021-03-27 08:28:40,751 - Model - INFO - Train Instance Accuracy: 0.989228
+2021-03-27 08:29:16,547 - Model - INFO - Test Instance Accuracy: 0.918366, Class Accuracy: 0.899739
+2021-03-27 08:29:16,548 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 08:29:16,548 - Model - INFO - Epoch 186 (186/200):
+2021-03-27 08:32:23,599 - Model - INFO - Train Instance Accuracy: 0.991565
+2021-03-27 08:32:59,782 - Model - INFO - Test Instance Accuracy: 0.922573, Class Accuracy: 0.899699
+2021-03-27 08:32:59,782 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 08:32:59,782 - Model - INFO - Epoch 187 (187/200):
+2021-03-27 08:36:11,188 - Model - INFO - Train Instance Accuracy: 0.991768
+2021-03-27 08:36:48,491 - Model - INFO - Test Instance Accuracy: 0.922006, Class Accuracy: 0.899470
+2021-03-27 08:36:48,491 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 08:36:48,491 - Model - INFO - Epoch 188 (188/200):
+2021-03-27 08:39:58,395 - Model - INFO - Train Instance Accuracy: 0.990346
+2021-03-27 08:40:34,925 - Model - INFO - Test Instance Accuracy: 0.924029, Class Accuracy: 0.900154
+2021-03-27 08:40:34,926 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 08:40:34,926 - Model - INFO - Epoch 189 (189/200):
+2021-03-27 08:43:42,337 - Model - INFO - Train Instance Accuracy: 0.991667
+2021-03-27 08:44:18,184 - Model - INFO - Test Instance Accuracy: 0.921521, Class Accuracy: 0.899520
+2021-03-27 08:44:18,184 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 08:44:18,185 - Model - INFO - Epoch 190 (190/200):
+2021-03-27 08:47:26,043 - Model - INFO - Train Instance Accuracy: 0.991260
+2021-03-27 08:48:02,164 - Model - INFO - Test Instance Accuracy: 0.919903, Class Accuracy: 0.894842
+2021-03-27 08:48:02,165 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 08:48:02,165 - Model - INFO - Epoch 191 (191/200):
+2021-03-27 08:51:09,116 - Model - INFO - Train Instance Accuracy: 0.990650
+2021-03-27 08:51:45,297 - Model - INFO - Test Instance Accuracy: 0.920793, Class Accuracy: 0.897560
+2021-03-27 08:51:45,297 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 08:51:45,297 - Model - INFO - Epoch 192 (192/200):
+2021-03-27 08:54:54,774 - Model - INFO - Train Instance Accuracy: 0.991057
+2021-03-27 08:55:31,753 - Model - INFO - Test Instance Accuracy: 0.922816, Class Accuracy: 0.895095
+2021-03-27 08:55:31,753 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 08:55:31,753 - Model - INFO - Epoch 193 (193/200):
+2021-03-27 08:58:43,316 - Model - INFO - Train Instance Accuracy: 0.990244
+2021-03-27 08:59:20,148 - Model - INFO - Test Instance Accuracy: 0.925243, Class Accuracy: 0.901845
+2021-03-27 08:59:20,148 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 08:59:20,148 - Model - INFO - Epoch 194 (194/200):
+2021-03-27 09:02:29,022 - Model - INFO - Train Instance Accuracy: 0.991260
+2021-03-27 09:03:04,461 - Model - INFO - Test Instance Accuracy: 0.920793, Class Accuracy: 0.895953
+2021-03-27 09:03:04,462 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 09:03:04,462 - Model - INFO - Epoch 195 (195/200):
+2021-03-27 09:06:12,013 - Model - INFO - Train Instance Accuracy: 0.989533
+2021-03-27 09:06:48,017 - Model - INFO - Test Instance Accuracy: 0.922896, Class Accuracy: 0.900085
+2021-03-27 09:06:48,018 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 09:06:48,018 - Model - INFO - Epoch 196 (196/200):
+2021-03-27 09:09:56,594 - Model - INFO - Train Instance Accuracy: 0.990955
+2021-03-27 09:10:32,268 - Model - INFO - Test Instance Accuracy: 0.922411, Class Accuracy: 0.898589
+2021-03-27 09:10:32,268 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 09:10:32,268 - Model - INFO - Epoch 197 (197/200):
+2021-03-27 09:13:39,165 - Model - INFO - Train Instance Accuracy: 0.991159
+2021-03-27 09:14:14,879 - Model - INFO - Test Instance Accuracy: 0.922249, Class Accuracy: 0.897890
+2021-03-27 09:14:14,879 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 09:14:14,879 - Model - INFO - Epoch 198 (198/200):
+2021-03-27 09:17:26,543 - Model - INFO - Train Instance Accuracy: 0.991972
+2021-03-27 09:18:03,273 - Model - INFO - Test Instance Accuracy: 0.924919, Class Accuracy: 0.901724
+2021-03-27 09:18:03,273 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 09:18:03,273 - Model - INFO - Epoch 199 (199/200):
+2021-03-27 09:21:14,396 - Model - INFO - Train Instance Accuracy: 0.991870
+2021-03-27 09:21:50,000 - Model - INFO - Test Instance Accuracy: 0.917961, Class Accuracy: 0.896603
+2021-03-27 09:21:50,000 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 09:21:50,000 - Model - INFO - Epoch 200 (200/200):
+2021-03-27 09:24:57,857 - Model - INFO - Train Instance Accuracy: 0.989634
+2021-03-27 09:25:34,375 - Model - INFO - Test Instance Accuracy: 0.920388, Class Accuracy: 0.896692
+2021-03-27 09:25:34,375 - Model - INFO - Best Instance Accuracy: 0.928964, Class Accuracy: 0.907417
+2021-03-27 09:25:34,375 - Model - INFO - End of training...
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_msg_normals/pointnet2_cls_msg.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_msg_normals/pointnet2_cls_msg.py
new file mode 100644
index 0000000000..40e2ca3f37
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_msg_normals/pointnet2_cls_msg.py
@@ -0,0 +1,51 @@
+import torch.nn as nn
+import torch.nn.functional as F
+from pointnet2_utils import PointNetSetAbstractionMsg, PointNetSetAbstraction
+
+
+class get_model(nn.Module):
+ def __init__(self,num_class,normal_channel=True):
+ super(get_model, self).__init__()
+ in_channel = 3 if normal_channel else 0
+ self.normal_channel = normal_channel
+ self.sa1 = PointNetSetAbstractionMsg(512, [0.1, 0.2, 0.4], [16, 32, 128], in_channel,[[32, 32, 64], [64, 64, 128], [64, 96, 128]])
+ self.sa2 = PointNetSetAbstractionMsg(128, [0.2, 0.4, 0.8], [32, 64, 128], 320,[[64, 64, 128], [128, 128, 256], [128, 128, 256]])
+ self.sa3 = PointNetSetAbstraction(None, None, None, 640 + 3, [256, 512, 1024], True)
+ self.fc1 = nn.Linear(1024, 512)
+ self.bn1 = nn.BatchNorm1d(512)
+ self.drop1 = nn.Dropout(0.4)
+ self.fc2 = nn.Linear(512, 256)
+ self.bn2 = nn.BatchNorm1d(256)
+ self.drop2 = nn.Dropout(0.5)
+ self.fc3 = nn.Linear(256, num_class)
+
+ def forward(self, xyz):
+ B, _, _ = xyz.shape
+ if self.normal_channel:
+ norm = xyz[:, 3:, :]
+ xyz = xyz[:, :3, :]
+ else:
+ norm = None
+ l1_xyz, l1_points = self.sa1(xyz, norm)
+ l2_xyz, l2_points = self.sa2(l1_xyz, l1_points)
+ l3_xyz, l3_points = self.sa3(l2_xyz, l2_points)
+ x = l3_points.view(B, 1024)
+ x = self.drop1(F.relu(self.bn1(self.fc1(x)), inplace=True))
+ x = self.drop2(F.relu(self.bn2(self.fc2(x)), inplace=True))
+ x = self.fc3(x)
+ x = F.log_softmax(x, -1)
+
+
+ return x,l3_points
+
+
+class get_loss(nn.Module):
+ def __init__(self):
+ super(get_loss, self).__init__()
+
+ def forward(self, pred, target, trans_feat):
+ total_loss = F.nll_loss(pred, target)
+
+ return total_loss
+
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_msg_normals/pointnet2_utils.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_msg_normals/pointnet2_utils.py
new file mode 100644
index 0000000000..4ef8763727
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_msg_normals/pointnet2_utils.py
@@ -0,0 +1,316 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from time import time
+import numpy as np
+
+def timeit(tag, t):
+ print("{}: {}s".format(tag, time() - t))
+ return time()
+
+def pc_normalize(pc):
+ l = pc.shape[0]
+ centroid = np.mean(pc, axis=0)
+ pc = pc - centroid
+ m = np.max(np.sqrt(np.sum(pc**2, axis=1)))
+ pc = pc / m
+ return pc
+
+def square_distance(src, dst):
+ """
+ Calculate Euclid distance between each two points.
+
+ src^T * dst = xn * xm + yn * ym + zn * zmï¼›
+ sum(src^2, dim=-1) = xn*xn + yn*yn + zn*zn;
+ sum(dst^2, dim=-1) = xm*xm + ym*ym + zm*zm;
+ dist = (xn - xm)^2 + (yn - ym)^2 + (zn - zm)^2
+ = sum(src**2,dim=-1)+sum(dst**2,dim=-1)-2*src^T*dst
+
+ Input:
+ src: source points, [B, N, C]
+ dst: target points, [B, M, C]
+ Output:
+ dist: per-point square distance, [B, N, M]
+ """
+ B, N, _ = src.shape
+ _, M, _ = dst.shape
+ dist = -2 * torch.matmul(src, dst.permute(0, 2, 1))
+ dist += torch.sum(src ** 2, -1).view(B, N, 1)
+ dist += torch.sum(dst ** 2, -1).view(B, 1, M)
+ return dist
+
+
+def index_points(points, idx):
+ """
+
+ Input:
+ points: input points data, [B, N, C]
+ idx: sample index data, [B, S]
+ Return:
+ new_points:, indexed points data, [B, S, C]
+ """
+ device = points.device
+ B = points.shape[0]
+ view_shape = list(idx.shape)
+ view_shape[1:] = [1] * (len(view_shape) - 1)
+ repeat_shape = list(idx.shape)
+ repeat_shape[0] = 1
+ batch_indices = torch.arange(B, dtype=torch.long).to(device).view(view_shape).repeat(repeat_shape)
+ new_points = points[batch_indices, idx, :]
+ return new_points
+
+
+def farthest_point_sample(xyz, npoint):
+ """
+ Input:
+ xyz: pointcloud data, [B, N, 3]
+ npoint: number of samples
+ Return:
+ centroids: sampled pointcloud index, [B, npoint]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ centroids = torch.zeros(B, npoint, dtype=torch.long).to(device)
+ distance = torch.ones(B, N).to(device) * 1e10
+ farthest = torch.randint(0, N, (B,), dtype=torch.long).to(device)
+ batch_indices = torch.arange(B, dtype=torch.long).to(device)
+ for i in range(npoint):
+ centroids[:, i] = farthest
+ centroid = xyz[batch_indices, farthest, :].view(B, 1, 3)
+ dist = torch.sum((xyz - centroid) ** 2, -1)
+ mask = dist < distance
+ distance[mask] = dist[mask]
+ farthest = torch.max(distance, -1)[1]
+ return centroids
+
+
+def query_ball_point(radius, nsample, xyz, new_xyz):
+ """
+ Input:
+ radius: local region radius
+ nsample: max sample number in local region
+ xyz: all points, [B, N, 3]
+ new_xyz: query points, [B, S, 3]
+ Return:
+ group_idx: grouped points index, [B, S, nsample]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ _, S, _ = new_xyz.shape
+ group_idx = torch.arange(N, dtype=torch.long).to(device).view(1, 1, N).repeat([B, S, 1])
+ sqrdists = square_distance(new_xyz, xyz)
+ group_idx[sqrdists > radius ** 2] = N
+ group_idx = group_idx.sort(dim=-1)[0][:, :, :nsample]
+ group_first = group_idx[:, :, 0].view(B, S, 1).repeat([1, 1, nsample])
+ mask = group_idx == N
+ group_idx[mask] = group_first[mask]
+ return group_idx
+
+
+def sample_and_group(npoint, radius, nsample, xyz, points, returnfps=False):
+ """
+ Input:
+ npoint:
+ radius:
+ nsample:
+ xyz: input points position data, [B, N, 3]
+ points: input points data, [B, N, D]
+ Return:
+ new_xyz: sampled points position data, [B, npoint, nsample, 3]
+ new_points: sampled points data, [B, npoint, nsample, 3+D]
+ """
+ B, N, C = xyz.shape
+ S = npoint
+ fps_idx = farthest_point_sample(xyz, npoint) # [B, npoint, C]
+ new_xyz = index_points(xyz, fps_idx)
+ idx = query_ball_point(radius, nsample, xyz, new_xyz)
+ grouped_xyz = index_points(xyz, idx) # [B, npoint, nsample, C]
+ grouped_xyz_norm = grouped_xyz - new_xyz.view(B, S, 1, C)
+
+ if points is not None:
+ grouped_points = index_points(points, idx)
+ new_points = torch.cat([grouped_xyz_norm, grouped_points], dim=-1) # [B, npoint, nsample, C+D]
+ else:
+ new_points = grouped_xyz_norm
+ if returnfps:
+ return new_xyz, new_points, grouped_xyz, fps_idx
+ else:
+ return new_xyz, new_points
+
+
+def sample_and_group_all(xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, N, 3]
+ points: input points data, [B, N, D]
+ Return:
+ new_xyz: sampled points position data, [B, 1, 3]
+ new_points: sampled points data, [B, 1, N, 3+D]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ new_xyz = torch.zeros(B, 1, C).to(device)
+ grouped_xyz = xyz.view(B, 1, N, C)
+ if points is not None:
+ new_points = torch.cat([grouped_xyz, points.view(B, 1, N, -1)], dim=-1)
+ else:
+ new_points = grouped_xyz
+ return new_xyz, new_points
+
+
+class PointNetSetAbstraction(nn.Module):
+ def __init__(self, npoint, radius, nsample, in_channel, mlp, group_all):
+ super(PointNetSetAbstraction, self).__init__()
+ self.npoint = npoint
+ self.radius = radius
+ self.nsample = nsample
+ self.mlp_convs = nn.ModuleList()
+ self.mlp_bns = nn.ModuleList()
+ last_channel = in_channel
+ for out_channel in mlp:
+ self.mlp_convs.append(nn.Conv2d(last_channel, out_channel, 1))
+ self.mlp_bns.append(nn.BatchNorm2d(out_channel))
+ last_channel = out_channel
+ self.group_all = group_all
+
+ def forward(self, xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, C, N]
+ points: input points data, [B, D, N]
+ Return:
+ new_xyz: sampled points position data, [B, C, S]
+ new_points_concat: sample points feature data, [B, D', S]
+ """
+ xyz = xyz.permute(0, 2, 1)
+ if points is not None:
+ points = points.permute(0, 2, 1)
+
+ if self.group_all:
+ new_xyz, new_points = sample_and_group_all(xyz, points)
+ else:
+ new_xyz, new_points = sample_and_group(self.npoint, self.radius, self.nsample, xyz, points)
+ # new_xyz: sampled points position data, [B, npoint, C]
+ # new_points: sampled points data, [B, npoint, nsample, C+D]
+ new_points = new_points.permute(0, 3, 2, 1) # [B, C+D, nsample,npoint]
+ for i, conv in enumerate(self.mlp_convs):
+ bn = self.mlp_bns[i]
+ new_points = F.relu(bn(conv(new_points)), inplace=True)
+
+ new_points = torch.max(new_points, 2)[0]
+ new_xyz = new_xyz.permute(0, 2, 1)
+ return new_xyz, new_points
+
+
+class PointNetSetAbstractionMsg(nn.Module):
+ def __init__(self, npoint, radius_list, nsample_list, in_channel, mlp_list):
+ super(PointNetSetAbstractionMsg, self).__init__()
+ self.npoint = npoint
+ self.radius_list = radius_list
+ self.nsample_list = nsample_list
+ self.conv_blocks = nn.ModuleList()
+ self.bn_blocks = nn.ModuleList()
+ for i in range(len(mlp_list)):
+ convs = nn.ModuleList()
+ bns = nn.ModuleList()
+ last_channel = in_channel + 3
+ for out_channel in mlp_list[i]:
+ convs.append(nn.Conv2d(last_channel, out_channel, 1))
+ bns.append(nn.BatchNorm2d(out_channel))
+ last_channel = out_channel
+ self.conv_blocks.append(convs)
+ self.bn_blocks.append(bns)
+
+ def forward(self, xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, C, N]
+ points: input points data, [B, D, N]
+ Return:
+ new_xyz: sampled points position data, [B, C, S]
+ new_points_concat: sample points feature data, [B, D', S]
+ """
+ xyz = xyz.permute(0, 2, 1)
+ if points is not None:
+ points = points.permute(0, 2, 1)
+
+ B, N, C = xyz.shape
+ S = self.npoint
+ new_xyz = index_points(xyz, farthest_point_sample(xyz, S))
+ new_points_list = []
+ for i, radius in enumerate(self.radius_list):
+ K = self.nsample_list[i]
+ group_idx = query_ball_point(radius, K, xyz, new_xyz)
+ grouped_xyz = index_points(xyz, group_idx)
+ grouped_xyz -= new_xyz.view(B, S, 1, C)
+ if points is not None:
+ grouped_points = index_points(points, group_idx)
+ grouped_points = torch.cat([grouped_points, grouped_xyz], dim=-1)
+ else:
+ grouped_points = grouped_xyz
+
+ grouped_points = grouped_points.permute(0, 3, 2, 1) # [B, D, K, S]
+ for j in range(len(self.conv_blocks[i])):
+ conv = self.conv_blocks[i][j]
+ bn = self.bn_blocks[i][j]
+ grouped_points = F.relu(bn(conv(grouped_points)), inplace=True)
+ new_points = torch.max(grouped_points, 2)[0] # [B, D', S]
+ new_points_list.append(new_points)
+
+ new_xyz = new_xyz.permute(0, 2, 1)
+ new_points_concat = torch.cat(new_points_list, dim=1)
+ return new_xyz, new_points_concat
+
+
+class PointNetFeaturePropagation(nn.Module):
+ def __init__(self, in_channel, mlp):
+ super(PointNetFeaturePropagation, self).__init__()
+ self.mlp_convs = nn.ModuleList()
+ self.mlp_bns = nn.ModuleList()
+ last_channel = in_channel
+ for out_channel in mlp:
+ self.mlp_convs.append(nn.Conv1d(last_channel, out_channel, 1))
+ self.mlp_bns.append(nn.BatchNorm1d(out_channel))
+ last_channel = out_channel
+
+ def forward(self, xyz1, xyz2, points1, points2):
+ """
+ Input:
+ xyz1: input points position data, [B, C, N]
+ xyz2: sampled input points position data, [B, C, S]
+ points1: input points data, [B, D, N]
+ points2: input points data, [B, D, S]
+ Return:
+ new_points: upsampled points data, [B, D', N]
+ """
+ xyz1 = xyz1.permute(0, 2, 1)
+ xyz2 = xyz2.permute(0, 2, 1)
+
+ points2 = points2.permute(0, 2, 1)
+ B, N, C = xyz1.shape
+ _, S, _ = xyz2.shape
+
+ if S == 1:
+ interpolated_points = points2.repeat(1, N, 1)
+ else:
+ dists = square_distance(xyz1, xyz2)
+ dists, idx = dists.sort(dim=-1)
+ dists, idx = dists[:, :, :3], idx[:, :, :3] # [B, N, 3]
+
+ dist_recip = 1.0 / (dists + 1e-8)
+ norm = torch.sum(dist_recip, dim=2, keepdim=True)
+ weight = dist_recip / norm
+ interpolated_points = torch.sum(index_points(points2, idx) * weight.view(B, N, 3, 1), dim=2)
+
+ if points1 is not None:
+ points1 = points1.permute(0, 2, 1)
+ new_points = torch.cat([points1, interpolated_points], dim=-1)
+ else:
+ new_points = interpolated_points
+
+ new_points = new_points.permute(0, 2, 1)
+ for i, conv in enumerate(self.mlp_convs):
+ bn = self.mlp_bns[i]
+ new_points = F.relu(bn(conv(new_points)), inplace=True)
+ return new_points
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
new file mode 100644
index 0000000000..c989e2fcef
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_ssg_wo_normals/logs/pointnet2_cls_ssg.txt b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_ssg_wo_normals/logs/pointnet2_cls_ssg.txt
new file mode 100644
index 0000000000..5b512db7ff
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_ssg_wo_normals/logs/pointnet2_cls_ssg.txt
@@ -0,0 +1,347 @@
+2021-04-15 05:30:22,278 - Model - INFO - PARAMETER ...
+2021-04-15 05:30:22,278 - Model - INFO - Namespace(batch_size=24, decay_rate=0.0001, epoch=200, gpu='0', learning_rate=0.001, log_dir='pointnet2_ssg_wo_normals', model='pointnet2_cls_ssg', num_category=40, num_point=1024, optimizer='Adam', process_data=True, use_cpu=False, use_normals=False, use_uniform_sample=False)
+2021-04-15 05:30:22,278 - Model - INFO - Load dataset ...
+2021-04-15 05:30:26,801 - Model - INFO - No existing model, starting training from scratch...
+2021-04-15 05:30:26,803 - Model - INFO - Start training...
+2021-04-15 05:30:26,803 - Model - INFO - Epoch 1 (1/200):
+2021-04-15 05:32:14,000 - Model - INFO - Train Instance Accuracy: 0.531707
+2021-04-15 05:32:36,125 - Model - INFO - Test Instance Accuracy: 0.625324, Class Accuracy: 0.561743
+2021-04-15 05:32:36,125 - Model - INFO - Best Instance Accuracy: 0.625324, Class Accuracy: 0.561743
+2021-04-15 05:32:36,125 - Model - INFO - Save model...
+2021-04-15 05:32:36,125 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 05:32:36,340 - Model - INFO - Epoch 2 (2/200):
+2021-04-15 05:34:24,466 - Model - INFO - Train Instance Accuracy: 0.680894
+2021-04-15 05:34:46,115 - Model - INFO - Test Instance Accuracy: 0.770065, Class Accuracy: 0.699795
+2021-04-15 05:34:46,116 - Model - INFO - Best Instance Accuracy: 0.770065, Class Accuracy: 0.699795
+2021-04-15 05:34:46,116 - Model - INFO - Save model...
+2021-04-15 05:34:46,116 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 05:34:46,351 - Model - INFO - Epoch 3 (3/200):
+2021-04-15 05:36:50,599 - Model - INFO - Train Instance Accuracy: 0.731809
+2021-04-15 05:37:23,233 - Model - INFO - Test Instance Accuracy: 0.801133, Class Accuracy: 0.759023
+2021-04-15 05:37:23,234 - Model - INFO - Best Instance Accuracy: 0.801133, Class Accuracy: 0.759023
+2021-04-15 05:37:23,234 - Model - INFO - Save model...
+2021-04-15 05:37:23,234 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 05:37:23,484 - Model - INFO - Epoch 4 (4/200):
+2021-04-15 05:39:40,119 - Model - INFO - Train Instance Accuracy: 0.755386
+2021-04-15 05:40:01,083 - Model - INFO - Test Instance Accuracy: 0.818851, Class Accuracy: 0.762730
+2021-04-15 05:40:01,083 - Model - INFO - Best Instance Accuracy: 0.818851, Class Accuracy: 0.762730
+2021-04-15 05:40:01,083 - Model - INFO - Save model...
+2021-04-15 05:40:01,083 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 05:40:01,293 - Model - INFO - Epoch 5 (5/200):
+2021-04-15 05:42:10,402 - Model - INFO - Train Instance Accuracy: 0.782215
+2021-04-15 05:42:39,409 - Model - INFO - Test Instance Accuracy: 0.854450, Class Accuracy: 0.805735
+2021-04-15 05:42:39,409 - Model - INFO - Best Instance Accuracy: 0.854450, Class Accuracy: 0.805735
+2021-04-15 05:42:39,410 - Model - INFO - Save model...
+2021-04-15 05:42:39,410 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 05:42:39,646 - Model - INFO - Epoch 6 (6/200):
+2021-04-15 05:44:50,958 - Model - INFO - Train Instance Accuracy: 0.792276
+2021-04-15 05:45:19,523 - Model - INFO - Test Instance Accuracy: 0.815049, Class Accuracy: 0.758266
+2021-04-15 05:45:19,524 - Model - INFO - Best Instance Accuracy: 0.854450, Class Accuracy: 0.805735
+2021-04-15 05:45:19,524 - Model - INFO - Epoch 7 (7/200):
+2021-04-15 05:47:34,836 - Model - INFO - Train Instance Accuracy: 0.809350
+2021-04-15 05:48:03,628 - Model - INFO - Test Instance Accuracy: 0.851861, Class Accuracy: 0.807646
+2021-04-15 05:48:03,629 - Model - INFO - Best Instance Accuracy: 0.854450, Class Accuracy: 0.807646
+2021-04-15 05:48:03,629 - Model - INFO - Epoch 8 (8/200):
+2021-04-15 05:50:06,511 - Model - INFO - Train Instance Accuracy: 0.804167
+2021-04-15 05:50:35,597 - Model - INFO - Test Instance Accuracy: 0.826861, Class Accuracy: 0.770605
+2021-04-15 05:50:35,597 - Model - INFO - Best Instance Accuracy: 0.854450, Class Accuracy: 0.807646
+2021-04-15 05:50:35,597 - Model - INFO - Epoch 9 (9/200):
+2021-04-15 05:52:47,700 - Model - INFO - Train Instance Accuracy: 0.823069
+2021-04-15 05:53:16,377 - Model - INFO - Test Instance Accuracy: 0.840049, Class Accuracy: 0.783198
+2021-04-15 05:53:16,377 - Model - INFO - Best Instance Accuracy: 0.854450, Class Accuracy: 0.807646
+2021-04-15 05:53:16,378 - Model - INFO - Epoch 10 (10/200):
+2021-04-15 05:55:26,376 - Model - INFO - Train Instance Accuracy: 0.830386
+2021-04-15 05:55:53,817 - Model - INFO - Test Instance Accuracy: 0.834951, Class Accuracy: 0.771490
+2021-04-15 05:55:53,818 - Model - INFO - Best Instance Accuracy: 0.854450, Class Accuracy: 0.807646
+2021-04-15 05:55:53,818 - Model - INFO - Epoch 11 (11/200):
+2021-04-15 05:58:11,881 - Model - INFO - Train Instance Accuracy: 0.835264
+2021-04-15 05:58:40,964 - Model - INFO - Test Instance Accuracy: 0.841667, Class Accuracy: 0.790723
+2021-04-15 05:58:40,964 - Model - INFO - Best Instance Accuracy: 0.854450, Class Accuracy: 0.807646
+2021-04-15 05:58:40,964 - Model - INFO - Epoch 12 (12/200):
+2021-04-15 06:00:42,162 - Model - INFO - Train Instance Accuracy: 0.839228
+2021-04-15 06:01:09,049 - Model - INFO - Test Instance Accuracy: 0.862055, Class Accuracy: 0.814482
+2021-04-15 06:01:09,049 - Model - INFO - Best Instance Accuracy: 0.862055, Class Accuracy: 0.814482
+2021-04-15 06:01:09,049 - Model - INFO - Save model...
+2021-04-15 06:01:09,049 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 06:01:09,275 - Model - INFO - Epoch 13 (13/200):
+2021-04-15 06:03:21,468 - Model - INFO - Train Instance Accuracy: 0.839431
+2021-04-15 06:03:48,521 - Model - INFO - Test Instance Accuracy: 0.799757, Class Accuracy: 0.768581
+2021-04-15 06:03:48,521 - Model - INFO - Best Instance Accuracy: 0.862055, Class Accuracy: 0.814482
+2021-04-15 06:03:48,521 - Model - INFO - Epoch 14 (14/200):
+2021-04-15 06:05:56,312 - Model - INFO - Train Instance Accuracy: 0.844512
+2021-04-15 06:06:26,882 - Model - INFO - Test Instance Accuracy: 0.873706, Class Accuracy: 0.834418
+2021-04-15 06:06:26,882 - Model - INFO - Best Instance Accuracy: 0.873706, Class Accuracy: 0.834418
+2021-04-15 06:06:26,882 - Model - INFO - Save model...
+2021-04-15 06:06:26,883 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 06:06:27,159 - Model - INFO - Epoch 15 (15/200):
+2021-04-15 06:08:41,844 - Model - INFO - Train Instance Accuracy: 0.846037
+2021-04-15 06:09:10,669 - Model - INFO - Test Instance Accuracy: 0.870307, Class Accuracy: 0.822881
+2021-04-15 06:09:10,669 - Model - INFO - Best Instance Accuracy: 0.873706, Class Accuracy: 0.834418
+2021-04-15 06:09:10,669 - Model - INFO - Epoch 16 (16/200):
+2021-04-15 06:11:09,891 - Model - INFO - Train Instance Accuracy: 0.851728
+2021-04-15 06:11:38,564 - Model - INFO - Test Instance Accuracy: 0.872006, Class Accuracy: 0.820836
+2021-04-15 06:11:38,564 - Model - INFO - Best Instance Accuracy: 0.873706, Class Accuracy: 0.834418
+2021-04-15 06:11:38,565 - Model - INFO - Epoch 17 (17/200):
+2021-04-15 06:13:47,526 - Model - INFO - Train Instance Accuracy: 0.860874
+2021-04-15 06:14:17,201 - Model - INFO - Test Instance Accuracy: 0.886003, Class Accuracy: 0.843735
+2021-04-15 06:14:17,201 - Model - INFO - Best Instance Accuracy: 0.886003, Class Accuracy: 0.843735
+2021-04-15 06:14:17,201 - Model - INFO - Save model...
+2021-04-15 06:14:17,201 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 06:14:17,430 - Model - INFO - Epoch 18 (18/200):
+2021-04-15 06:16:34,595 - Model - INFO - Train Instance Accuracy: 0.856402
+2021-04-15 06:17:03,823 - Model - INFO - Test Instance Accuracy: 0.851618, Class Accuracy: 0.808481
+2021-04-15 06:17:03,823 - Model - INFO - Best Instance Accuracy: 0.886003, Class Accuracy: 0.843735
+2021-04-15 06:17:03,823 - Model - INFO - Epoch 19 (19/200):
+2021-04-15 06:19:13,449 - Model - INFO - Train Instance Accuracy: 0.861789
+2021-04-15 06:19:45,515 - Model - INFO - Test Instance Accuracy: 0.889887, Class Accuracy: 0.846966
+2021-04-15 06:19:45,515 - Model - INFO - Best Instance Accuracy: 0.889887, Class Accuracy: 0.846966
+2021-04-15 06:19:45,515 - Model - INFO - Save model...
+2021-04-15 06:19:45,515 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 06:19:45,751 - Model - INFO - Epoch 20 (20/200):
+2021-04-15 06:21:53,352 - Model - INFO - Train Instance Accuracy: 0.878455
+2021-04-15 06:22:20,108 - Model - INFO - Test Instance Accuracy: 0.899434, Class Accuracy: 0.857920
+2021-04-15 06:22:20,109 - Model - INFO - Best Instance Accuracy: 0.899434, Class Accuracy: 0.857920
+2021-04-15 06:22:20,109 - Model - INFO - Save model...
+2021-04-15 06:22:20,109 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 06:22:20,416 - Model - INFO - Epoch 21 (21/200):
+2021-04-15 06:24:31,873 - Model - INFO - Train Instance Accuracy: 0.884248
+2021-04-15 06:25:00,930 - Model - INFO - Test Instance Accuracy: 0.900971, Class Accuracy: 0.857757
+2021-04-15 06:25:00,930 - Model - INFO - Best Instance Accuracy: 0.900971, Class Accuracy: 0.857920
+2021-04-15 06:25:00,931 - Model - INFO - Save model...
+2021-04-15 06:25:00,931 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 06:25:01,416 - Model - INFO - Epoch 22 (22/200):
+2021-04-15 06:27:14,083 - Model - INFO - Train Instance Accuracy: 0.880285
+2021-04-15 06:27:42,981 - Model - INFO - Test Instance Accuracy: 0.901052, Class Accuracy: 0.848635
+2021-04-15 06:27:42,982 - Model - INFO - Best Instance Accuracy: 0.901052, Class Accuracy: 0.857920
+2021-04-15 06:27:42,982 - Model - INFO - Save model...
+2021-04-15 06:27:42,982 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 06:27:43,250 - Model - INFO - Epoch 23 (23/200):
+2021-04-15 06:29:59,007 - Model - INFO - Train Instance Accuracy: 0.888211
+2021-04-15 06:30:13,320 - Model - INFO - Test Instance Accuracy: 0.904854, Class Accuracy: 0.866657
+2021-04-15 06:30:13,321 - Model - INFO - Best Instance Accuracy: 0.904854, Class Accuracy: 0.866657
+2021-04-15 06:30:13,321 - Model - INFO - Save model...
+2021-04-15 06:30:13,321 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 06:30:13,576 - Model - INFO - Epoch 24 (24/200):
+2021-04-15 06:32:01,965 - Model - INFO - Train Instance Accuracy: 0.886280
+2021-04-15 06:32:25,718 - Model - INFO - Test Instance Accuracy: 0.907120, Class Accuracy: 0.876527
+2021-04-15 06:32:25,719 - Model - INFO - Best Instance Accuracy: 0.907120, Class Accuracy: 0.876527
+2021-04-15 06:32:25,719 - Model - INFO - Save model...
+2021-04-15 06:32:25,719 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 06:32:25,953 - Model - INFO - Epoch 25 (25/200):
+2021-04-15 06:34:16,547 - Model - INFO - Train Instance Accuracy: 0.881707
+2021-04-15 06:34:38,983 - Model - INFO - Test Instance Accuracy: 0.905340, Class Accuracy: 0.864522
+2021-04-15 06:34:38,983 - Model - INFO - Best Instance Accuracy: 0.907120, Class Accuracy: 0.876527
+2021-04-15 06:34:38,983 - Model - INFO - Epoch 26 (26/200):
+2021-04-15 06:36:30,760 - Model - INFO - Train Instance Accuracy: 0.890447
+2021-04-15 06:36:54,139 - Model - INFO - Test Instance Accuracy: 0.892557, Class Accuracy: 0.864255
+2021-04-15 06:36:54,139 - Model - INFO - Best Instance Accuracy: 0.907120, Class Accuracy: 0.876527
+2021-04-15 06:36:54,139 - Model - INFO - Epoch 27 (27/200):
+2021-04-15 06:38:43,158 - Model - INFO - Train Instance Accuracy: 0.886484
+2021-04-15 06:39:05,616 - Model - INFO - Test Instance Accuracy: 0.908657, Class Accuracy: 0.860172
+2021-04-15 06:39:05,617 - Model - INFO - Best Instance Accuracy: 0.908657, Class Accuracy: 0.876527
+2021-04-15 06:39:05,617 - Model - INFO - Save model...
+2021-04-15 06:39:05,617 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 06:39:05,907 - Model - INFO - Epoch 28 (28/200):
+2021-04-15 06:40:46,682 - Model - INFO - Train Instance Accuracy: 0.889431
+2021-04-15 06:41:08,187 - Model - INFO - Test Instance Accuracy: 0.903074, Class Accuracy: 0.855049
+2021-04-15 06:41:08,188 - Model - INFO - Best Instance Accuracy: 0.908657, Class Accuracy: 0.876527
+2021-04-15 06:41:08,188 - Model - INFO - Epoch 29 (29/200):
+2021-04-15 06:42:56,125 - Model - INFO - Train Instance Accuracy: 0.892683
+2021-04-15 06:43:19,909 - Model - INFO - Test Instance Accuracy: 0.896359, Class Accuracy: 0.862396
+2021-04-15 06:43:19,909 - Model - INFO - Best Instance Accuracy: 0.908657, Class Accuracy: 0.876527
+2021-04-15 06:43:19,910 - Model - INFO - Epoch 30 (30/200):
+2021-04-15 06:45:16,038 - Model - INFO - Train Instance Accuracy: 0.895020
+2021-04-15 06:45:38,281 - Model - INFO - Test Instance Accuracy: 0.894175, Class Accuracy: 0.850114
+2021-04-15 06:45:38,282 - Model - INFO - Best Instance Accuracy: 0.908657, Class Accuracy: 0.876527
+2021-04-15 06:45:38,282 - Model - INFO - Epoch 31 (31/200):
+2021-04-15 06:47:35,299 - Model - INFO - Train Instance Accuracy: 0.893699
+2021-04-15 06:48:02,039 - Model - INFO - Test Instance Accuracy: 0.898544, Class Accuracy: 0.853428
+2021-04-15 06:48:02,039 - Model - INFO - Best Instance Accuracy: 0.908657, Class Accuracy: 0.876527
+2021-04-15 06:48:02,039 - Model - INFO - Epoch 32 (32/200):
+2021-04-15 06:49:40,731 - Model - INFO - Train Instance Accuracy: 0.898882
+2021-04-15 06:50:04,526 - Model - INFO - Test Instance Accuracy: 0.906715, Class Accuracy: 0.874717
+2021-04-15 06:50:04,527 - Model - INFO - Best Instance Accuracy: 0.908657, Class Accuracy: 0.876527
+2021-04-15 06:50:04,527 - Model - INFO - Epoch 33 (33/200):
+2021-04-15 06:51:43,807 - Model - INFO - Train Instance Accuracy: 0.896646
+2021-04-15 06:52:06,721 - Model - INFO - Test Instance Accuracy: 0.912702, Class Accuracy: 0.878564
+2021-04-15 06:52:06,722 - Model - INFO - Best Instance Accuracy: 0.912702, Class Accuracy: 0.878564
+2021-04-15 06:52:06,722 - Model - INFO - Save model...
+2021-04-15 06:52:06,722 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 06:52:06,950 - Model - INFO - Epoch 34 (34/200):
+2021-04-15 06:53:47,005 - Model - INFO - Train Instance Accuracy: 0.895427
+2021-04-15 06:54:11,595 - Model - INFO - Test Instance Accuracy: 0.893204, Class Accuracy: 0.855497
+2021-04-15 06:54:11,595 - Model - INFO - Best Instance Accuracy: 0.912702, Class Accuracy: 0.878564
+2021-04-15 06:54:11,595 - Model - INFO - Epoch 35 (35/200):
+2021-04-15 06:55:55,303 - Model - INFO - Train Instance Accuracy: 0.896646
+2021-04-15 06:56:18,493 - Model - INFO - Test Instance Accuracy: 0.909466, Class Accuracy: 0.864530
+2021-04-15 06:56:18,495 - Model - INFO - Best Instance Accuracy: 0.912702, Class Accuracy: 0.878564
+2021-04-15 06:56:18,495 - Model - INFO - Epoch 36 (36/200):
+2021-04-15 06:58:15,600 - Model - INFO - Train Instance Accuracy: 0.895528
+2021-04-15 06:58:41,327 - Model - INFO - Test Instance Accuracy: 0.902832, Class Accuracy: 0.857887
+2021-04-15 06:58:41,327 - Model - INFO - Best Instance Accuracy: 0.912702, Class Accuracy: 0.878564
+2021-04-15 06:58:41,328 - Model - INFO - Epoch 37 (37/200):
+2021-04-15 07:00:23,245 - Model - INFO - Train Instance Accuracy: 0.905386
+2021-04-15 07:00:47,393 - Model - INFO - Test Instance Accuracy: 0.909304, Class Accuracy: 0.873091
+2021-04-15 07:00:47,393 - Model - INFO - Best Instance Accuracy: 0.912702, Class Accuracy: 0.878564
+2021-04-15 07:00:47,394 - Model - INFO - Epoch 38 (38/200):
+2021-04-15 07:02:32,678 - Model - INFO - Train Instance Accuracy: 0.902033
+2021-04-15 07:02:59,090 - Model - INFO - Test Instance Accuracy: 0.898948, Class Accuracy: 0.863316
+2021-04-15 07:02:59,091 - Model - INFO - Best Instance Accuracy: 0.912702, Class Accuracy: 0.878564
+2021-04-15 07:02:59,091 - Model - INFO - Epoch 39 (39/200):
+2021-04-15 07:04:38,589 - Model - INFO - Train Instance Accuracy: 0.903659
+2021-04-15 07:05:03,771 - Model - INFO - Test Instance Accuracy: 0.902508, Class Accuracy: 0.871518
+2021-04-15 07:05:03,771 - Model - INFO - Best Instance Accuracy: 0.912702, Class Accuracy: 0.878564
+2021-04-15 07:05:03,771 - Model - INFO - Epoch 40 (40/200):
+2021-04-15 07:06:53,198 - Model - INFO - Train Instance Accuracy: 0.908943
+2021-04-15 07:07:17,261 - Model - INFO - Test Instance Accuracy: 0.915372, Class Accuracy: 0.882057
+2021-04-15 07:07:17,262 - Model - INFO - Best Instance Accuracy: 0.915372, Class Accuracy: 0.882057
+2021-04-15 07:07:17,262 - Model - INFO - Save model...
+2021-04-15 07:07:17,262 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 07:07:17,541 - Model - INFO - Epoch 41 (41/200):
+2021-04-15 07:09:12,811 - Model - INFO - Train Instance Accuracy: 0.916972
+2021-04-15 07:09:37,593 - Model - INFO - Test Instance Accuracy: 0.917718, Class Accuracy: 0.870650
+2021-04-15 07:09:37,594 - Model - INFO - Best Instance Accuracy: 0.917718, Class Accuracy: 0.882057
+2021-04-15 07:09:37,594 - Model - INFO - Save model...
+2021-04-15 07:09:37,594 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 07:09:37,872 - Model - INFO - Epoch 42 (42/200):
+2021-04-15 07:11:26,080 - Model - INFO - Train Instance Accuracy: 0.917480
+2021-04-15 07:11:50,181 - Model - INFO - Test Instance Accuracy: 0.909385, Class Accuracy: 0.873685
+2021-04-15 07:11:50,182 - Model - INFO - Best Instance Accuracy: 0.917718, Class Accuracy: 0.882057
+2021-04-15 07:11:50,182 - Model - INFO - Epoch 43 (43/200):
+2021-04-15 07:13:38,760 - Model - INFO - Train Instance Accuracy: 0.919207
+2021-04-15 07:14:03,060 - Model - INFO - Test Instance Accuracy: 0.919256, Class Accuracy: 0.882563
+2021-04-15 07:14:03,060 - Model - INFO - Best Instance Accuracy: 0.919256, Class Accuracy: 0.882563
+2021-04-15 07:14:03,060 - Model - INFO - Save model...
+2021-04-15 07:14:03,060 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 07:14:03,314 - Model - INFO - Epoch 44 (44/200):
+2021-04-15 07:15:52,934 - Model - INFO - Train Instance Accuracy: 0.916768
+2021-04-15 07:16:17,463 - Model - INFO - Test Instance Accuracy: 0.911489, Class Accuracy: 0.878203
+2021-04-15 07:16:17,464 - Model - INFO - Best Instance Accuracy: 0.919256, Class Accuracy: 0.882563
+2021-04-15 07:16:17,464 - Model - INFO - Epoch 45 (45/200):
+2021-04-15 07:18:07,975 - Model - INFO - Train Instance Accuracy: 0.912093
+2021-04-15 07:18:32,203 - Model - INFO - Test Instance Accuracy: 0.902023, Class Accuracy: 0.868103
+2021-04-15 07:18:32,203 - Model - INFO - Best Instance Accuracy: 0.919256, Class Accuracy: 0.882563
+2021-04-15 07:18:32,203 - Model - INFO - Epoch 46 (46/200):
+2021-04-15 07:20:10,362 - Model - INFO - Train Instance Accuracy: 0.918496
+2021-04-15 07:20:33,434 - Model - INFO - Test Instance Accuracy: 0.919984, Class Accuracy: 0.884730
+2021-04-15 07:20:33,435 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.884730
+2021-04-15 07:20:33,435 - Model - INFO - Save model...
+2021-04-15 07:20:33,435 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 07:20:33,750 - Model - INFO - Epoch 47 (47/200):
+2021-04-15 07:22:14,546 - Model - INFO - Train Instance Accuracy: 0.923476
+2021-04-15 07:22:38,108 - Model - INFO - Test Instance Accuracy: 0.911327, Class Accuracy: 0.880924
+2021-04-15 07:22:38,108 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.884730
+2021-04-15 07:22:38,108 - Model - INFO - Epoch 48 (48/200):
+2021-04-15 07:24:28,595 - Model - INFO - Train Instance Accuracy: 0.917683
+2021-04-15 07:24:50,967 - Model - INFO - Test Instance Accuracy: 0.913592, Class Accuracy: 0.885645
+2021-04-15 07:24:50,967 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.885645
+2021-04-15 07:24:50,968 - Model - INFO - Epoch 49 (49/200):
+2021-04-15 07:26:30,556 - Model - INFO - Train Instance Accuracy: 0.918293
+2021-04-15 07:26:53,167 - Model - INFO - Test Instance Accuracy: 0.911327, Class Accuracy: 0.873041
+2021-04-15 07:26:53,168 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.885645
+2021-04-15 07:26:53,168 - Model - INFO - Epoch 50 (50/200):
+2021-04-15 07:28:42,338 - Model - INFO - Train Instance Accuracy: 0.917480
+2021-04-15 07:29:09,396 - Model - INFO - Test Instance Accuracy: 0.917961, Class Accuracy: 0.886098
+2021-04-15 07:29:09,397 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.886098
+2021-04-15 07:29:09,397 - Model - INFO - Epoch 51 (51/200):
+2021-04-15 07:30:53,641 - Model - INFO - Train Instance Accuracy: 0.917480
+2021-04-15 07:31:18,052 - Model - INFO - Test Instance Accuracy: 0.914887, Class Accuracy: 0.875262
+2021-04-15 07:31:18,053 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.886098
+2021-04-15 07:31:18,053 - Model - INFO - Epoch 52 (52/200):
+2021-04-15 07:33:05,639 - Model - INFO - Train Instance Accuracy: 0.920935
+2021-04-15 07:33:28,699 - Model - INFO - Test Instance Accuracy: 0.903803, Class Accuracy: 0.866199
+2021-04-15 07:33:28,700 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.886098
+2021-04-15 07:33:28,700 - Model - INFO - Epoch 53 (53/200):
+2021-04-15 07:35:07,945 - Model - INFO - Train Instance Accuracy: 0.919411
+2021-04-15 07:35:30,318 - Model - INFO - Test Instance Accuracy: 0.909951, Class Accuracy: 0.883292
+2021-04-15 07:35:30,318 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.886098
+2021-04-15 07:35:30,319 - Model - INFO - Epoch 54 (54/200):
+2021-04-15 07:37:24,699 - Model - INFO - Train Instance Accuracy: 0.922358
+2021-04-15 07:37:50,792 - Model - INFO - Test Instance Accuracy: 0.911489, Class Accuracy: 0.878879
+2021-04-15 07:37:50,792 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.886098
+2021-04-15 07:37:50,792 - Model - INFO - Epoch 55 (55/200):
+2021-04-15 07:39:36,926 - Model - INFO - Train Instance Accuracy: 0.917785
+2021-04-15 07:40:00,452 - Model - INFO - Test Instance Accuracy: 0.910841, Class Accuracy: 0.879716
+2021-04-15 07:40:00,452 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.886098
+2021-04-15 07:40:00,452 - Model - INFO - Epoch 56 (56/200):
+2021-04-15 07:41:44,314 - Model - INFO - Train Instance Accuracy: 0.923476
+2021-04-15 07:42:08,700 - Model - INFO - Test Instance Accuracy: 0.905502, Class Accuracy: 0.868091
+2021-04-15 07:42:08,701 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.886098
+2021-04-15 07:42:08,701 - Model - INFO - Epoch 57 (57/200):
+2021-04-15 07:43:46,504 - Model - INFO - Train Instance Accuracy: 0.922866
+2021-04-15 07:44:08,740 - Model - INFO - Test Instance Accuracy: 0.907282, Class Accuracy: 0.875386
+2021-04-15 07:44:08,740 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.886098
+2021-04-15 07:44:08,740 - Model - INFO - Epoch 58 (58/200):
+2021-04-15 07:46:01,728 - Model - INFO - Train Instance Accuracy: 0.922053
+2021-04-15 07:46:27,185 - Model - INFO - Test Instance Accuracy: 0.913592, Class Accuracy: 0.879110
+2021-04-15 07:46:27,185 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.886098
+2021-04-15 07:46:27,185 - Model - INFO - Epoch 59 (59/200):
+2021-04-15 07:48:14,888 - Model - INFO - Train Instance Accuracy: 0.923577
+2021-04-15 07:48:37,721 - Model - INFO - Test Instance Accuracy: 0.908819, Class Accuracy: 0.871531
+2021-04-15 07:48:37,721 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.886098
+2021-04-15 07:48:37,721 - Model - INFO - Epoch 60 (60/200):
+2021-04-15 07:50:14,850 - Model - INFO - Train Instance Accuracy: 0.931402
+2021-04-15 07:50:37,963 - Model - INFO - Test Instance Accuracy: 0.919256, Class Accuracy: 0.887591
+2021-04-15 07:50:37,963 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.887591
+2021-04-15 07:50:37,963 - Model - INFO - Epoch 61 (61/200):
+2021-04-15 07:52:26,714 - Model - INFO - Train Instance Accuracy: 0.930691
+2021-04-15 07:52:50,398 - Model - INFO - Test Instance Accuracy: 0.908981, Class Accuracy: 0.891706
+2021-04-15 07:52:50,399 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.891706
+2021-04-15 07:52:50,399 - Model - INFO - Epoch 62 (62/200):
+2021-04-15 07:54:44,716 - Model - INFO - Train Instance Accuracy: 0.931606
+2021-04-15 07:55:07,927 - Model - INFO - Test Instance Accuracy: 0.910194, Class Accuracy: 0.888310
+2021-04-15 07:55:07,927 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.891706
+2021-04-15 07:55:07,928 - Model - INFO - Epoch 63 (63/200):
+2021-04-15 07:56:49,104 - Model - INFO - Train Instance Accuracy: 0.939024
+2021-04-15 07:57:13,992 - Model - INFO - Test Instance Accuracy: 0.915534, Class Accuracy: 0.884778
+2021-04-15 07:57:13,993 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.891706
+2021-04-15 07:57:13,993 - Model - INFO - Epoch 64 (64/200):
+2021-04-15 07:58:59,907 - Model - INFO - Train Instance Accuracy: 0.933740
+2021-04-15 07:59:23,959 - Model - INFO - Test Instance Accuracy: 0.911246, Class Accuracy: 0.879409
+2021-04-15 07:59:23,960 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.891706
+2021-04-15 07:59:23,960 - Model - INFO - Epoch 65 (65/200):
+2021-04-15 08:01:02,479 - Model - INFO - Train Instance Accuracy: 0.933841
+2021-04-15 08:01:25,933 - Model - INFO - Test Instance Accuracy: 0.909466, Class Accuracy: 0.869410
+2021-04-15 08:01:25,934 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.891706
+2021-04-15 08:01:25,934 - Model - INFO - Epoch 66 (66/200):
+2021-04-15 08:03:07,420 - Model - INFO - Train Instance Accuracy: 0.938313
+2021-04-15 08:03:31,234 - Model - INFO - Test Instance Accuracy: 0.913430, Class Accuracy: 0.878615
+2021-04-15 08:03:31,234 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.891706
+2021-04-15 08:03:31,234 - Model - INFO - Epoch 67 (67/200):
+2021-04-15 08:05:09,089 - Model - INFO - Train Instance Accuracy: 0.936890
+2021-04-15 08:05:31,842 - Model - INFO - Test Instance Accuracy: 0.908738, Class Accuracy: 0.882990
+2021-04-15 08:05:31,842 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.891706
+2021-04-15 08:05:31,842 - Model - INFO - Epoch 68 (68/200):
+2021-04-15 08:07:28,343 - Model - INFO - Train Instance Accuracy: 0.937703
+2021-04-15 08:07:54,122 - Model - INFO - Test Instance Accuracy: 0.915210, Class Accuracy: 0.885137
+2021-04-15 08:07:54,123 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.891706
+2021-04-15 08:07:54,123 - Model - INFO - Epoch 69 (69/200):
+2021-04-15 08:09:41,300 - Model - INFO - Train Instance Accuracy: 0.936992
+2021-04-15 08:10:05,464 - Model - INFO - Test Instance Accuracy: 0.911893, Class Accuracy: 0.885623
+2021-04-15 08:10:05,465 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.891706
+2021-04-15 08:10:05,465 - Model - INFO - Epoch 70 (70/200):
+2021-04-15 08:11:39,150 - Model - INFO - Train Instance Accuracy: 0.937093
+2021-04-15 08:12:02,839 - Model - INFO - Test Instance Accuracy: 0.915049, Class Accuracy: 0.887685
+2021-04-15 08:12:02,840 - Model - INFO - Best Instance Accuracy: 0.919984, Class Accuracy: 0.891706
+2021-04-15 08:12:02,840 - Model - INFO - Epoch 71 (71/200):
+2021-04-15 08:13:55,476 - Model - INFO - Train Instance Accuracy: 0.939431
+2021-04-15 08:14:17,252 - Model - INFO - Test Instance Accuracy: 0.925324, Class Accuracy: 0.884857
+2021-04-15 08:14:17,252 - Model - INFO - Best Instance Accuracy: 0.925324, Class Accuracy: 0.891706
+2021-04-15 08:14:17,253 - Model - INFO - Save model...
+2021-04-15 08:14:17,253 - Model - INFO - Saving at log/classification/pointnet2_ssg_wo_normals/checkpoints/best_model.pth
+2021-04-15 08:14:17,514 - Model - INFO - Epoch 72 (72/200):
+2021-04-15 08:15:57,731 - Model - INFO - Train Instance Accuracy: 0.939634
+2021-04-15 08:16:23,368 - Model - INFO - Test Instance Accuracy: 0.915210, Class Accuracy: 0.883994
+2021-04-15 08:16:23,368 - Model - INFO - Best Instance Accuracy: 0.925324, Class Accuracy: 0.891706
+2021-04-15 08:16:23,368 - Model - INFO - Epoch 73 (73/200):
+2021-04-15 08:18:21,420 - Model - INFO - Train Instance Accuracy: 0.937093
+2021-04-15 08:18:45,123 - Model - INFO - Test Instance Accuracy: 0.917961, Class Accuracy: 0.883463
+2021-04-15 08:18:45,124 - Model - INFO - Best Instance Accuracy: 0.925324, Class Accuracy: 0.891706
+2021-04-15 08:18:45,124 - Model - INFO - Epoch 74 (74/200):
+2021-04-15 08:20:25,153 - Model - INFO - Train Instance Accuracy: 0.935874
+2021-04-15 08:20:49,300 - Model - INFO - Test Instance Accuracy: 0.914644, Class Accuracy: 0.891132
+2021-04-15 08:20:49,301 - Model - INFO - Best Instance Accuracy: 0.925324, Class Accuracy: 0.891706
+2021-04-15 08:20:49,301 - Model - INFO - Epoch 75 (75/200):
+2021-04-15 08:22:37,667 - Model - INFO - Train Instance Accuracy: 0.940549
+2021-04-15 08:23:00,730 - Model - INFO - Test Instance Accuracy: 0.911570, Class Accuracy: 0.884318
+2021-04-15 08:23:00,731 - Model - INFO - Best Instance Accuracy: 0.925324, Class Accuracy: 0.891706
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_ssg_wo_normals/pointnet2_cls_ssg.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_ssg_wo_normals/pointnet2_cls_ssg.py
new file mode 100644
index 0000000000..4e14ba6a5e
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_ssg_wo_normals/pointnet2_cls_ssg.py
@@ -0,0 +1,50 @@
+import torch.nn as nn
+import torch.nn.functional as F
+from pointnet2_utils import PointNetSetAbstraction
+
+
+class get_model(nn.Module):
+ def __init__(self,num_class,normal_channel=True):
+ super(get_model, self).__init__()
+ in_channel = 6 if normal_channel else 3
+ self.normal_channel = normal_channel
+ self.sa1 = PointNetSetAbstraction(npoint=512, radius=0.2, nsample=32, in_channel=in_channel, mlp=[64, 64, 128], group_all=False)
+ self.sa2 = PointNetSetAbstraction(npoint=128, radius=0.4, nsample=64, in_channel=128 + 3, mlp=[128, 128, 256], group_all=False)
+ self.sa3 = PointNetSetAbstraction(npoint=None, radius=None, nsample=None, in_channel=256 + 3, mlp=[256, 512, 1024], group_all=True)
+ self.fc1 = nn.Linear(1024, 512)
+ self.bn1 = nn.BatchNorm1d(512)
+ self.drop1 = nn.Dropout(0.4)
+ self.fc2 = nn.Linear(512, 256)
+ self.bn2 = nn.BatchNorm1d(256)
+ self.drop2 = nn.Dropout(0.4)
+ self.fc3 = nn.Linear(256, num_class)
+
+ def forward(self, xyz):
+ B, _, _ = xyz.shape
+ if self.normal_channel:
+ norm = xyz[:, 3:, :]
+ xyz = xyz[:, :3, :]
+ else:
+ norm = None
+ l1_xyz, l1_points = self.sa1(xyz, norm)
+ l2_xyz, l2_points = self.sa2(l1_xyz, l1_points)
+ l3_xyz, l3_points = self.sa3(l2_xyz, l2_points)
+ x = l3_points.view(B, 1024)
+ x = self.drop1(F.relu(self.bn1(self.fc1(x))))
+ x = self.drop2(F.relu(self.bn2(self.fc2(x))))
+ x = self.fc3(x)
+ x = F.log_softmax(x, -1)
+
+
+ return x, l3_points
+
+
+
+class get_loss(nn.Module):
+ def __init__(self):
+ super(get_loss, self).__init__()
+
+ def forward(self, pred, target, trans_feat):
+ total_loss = F.nll_loss(pred, target)
+
+ return total_loss
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_ssg_wo_normals/pointnet2_utils.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_ssg_wo_normals/pointnet2_utils.py
new file mode 100644
index 0000000000..d13986ed62
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/classification/pointnet2_ssg_wo_normals/pointnet2_utils.py
@@ -0,0 +1,316 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from time import time
+import numpy as np
+
+def timeit(tag, t):
+ print("{}: {}s".format(tag, time() - t))
+ return time()
+
+def pc_normalize(pc):
+ l = pc.shape[0]
+ centroid = np.mean(pc, axis=0)
+ pc = pc - centroid
+ m = np.max(np.sqrt(np.sum(pc**2, axis=1)))
+ pc = pc / m
+ return pc
+
+def square_distance(src, dst):
+ """
+ Calculate Euclid distance between each two points.
+
+ src^T * dst = xn * xm + yn * ym + zn * zmï¼›
+ sum(src^2, dim=-1) = xn*xn + yn*yn + zn*zn;
+ sum(dst^2, dim=-1) = xm*xm + ym*ym + zm*zm;
+ dist = (xn - xm)^2 + (yn - ym)^2 + (zn - zm)^2
+ = sum(src**2,dim=-1)+sum(dst**2,dim=-1)-2*src^T*dst
+
+ Input:
+ src: source points, [B, N, C]
+ dst: target points, [B, M, C]
+ Output:
+ dist: per-point square distance, [B, N, M]
+ """
+ B, N, _ = src.shape
+ _, M, _ = dst.shape
+ dist = -2 * torch.matmul(src, dst.permute(0, 2, 1))
+ dist += torch.sum(src ** 2, -1).view(B, N, 1)
+ dist += torch.sum(dst ** 2, -1).view(B, 1, M)
+ return dist
+
+
+def index_points(points, idx):
+ """
+
+ Input:
+ points: input points data, [B, N, C]
+ idx: sample index data, [B, S]
+ Return:
+ new_points:, indexed points data, [B, S, C]
+ """
+ device = points.device
+ B = points.shape[0]
+ view_shape = list(idx.shape)
+ view_shape[1:] = [1] * (len(view_shape) - 1)
+ repeat_shape = list(idx.shape)
+ repeat_shape[0] = 1
+ batch_indices = torch.arange(B, dtype=torch.long).to(device).view(view_shape).repeat(repeat_shape)
+ new_points = points[batch_indices, idx, :]
+ return new_points
+
+
+def farthest_point_sample(xyz, npoint):
+ """
+ Input:
+ xyz: pointcloud data, [B, N, 3]
+ npoint: number of samples
+ Return:
+ centroids: sampled pointcloud index, [B, npoint]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ centroids = torch.zeros(B, npoint, dtype=torch.long).to(device)
+ distance = torch.ones(B, N).to(device) * 1e10
+ farthest = torch.randint(0, N, (B,), dtype=torch.long).to(device)
+ batch_indices = torch.arange(B, dtype=torch.long).to(device)
+ for i in range(npoint):
+ centroids[:, i] = farthest
+ centroid = xyz[batch_indices, farthest, :].view(B, 1, 3)
+ dist = torch.sum((xyz - centroid) ** 2, -1)
+ mask = dist < distance
+ distance[mask] = dist[mask]
+ farthest = torch.max(distance, -1)[1]
+ return centroids
+
+
+def query_ball_point(radius, nsample, xyz, new_xyz):
+ """
+ Input:
+ radius: local region radius
+ nsample: max sample number in local region
+ xyz: all points, [B, N, 3]
+ new_xyz: query points, [B, S, 3]
+ Return:
+ group_idx: grouped points index, [B, S, nsample]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ _, S, _ = new_xyz.shape
+ group_idx = torch.arange(N, dtype=torch.long).to(device).view(1, 1, N).repeat([B, S, 1])
+ sqrdists = square_distance(new_xyz, xyz)
+ group_idx[sqrdists > radius ** 2] = N
+ group_idx = group_idx.sort(dim=-1)[0][:, :, :nsample]
+ group_first = group_idx[:, :, 0].view(B, S, 1).repeat([1, 1, nsample])
+ mask = group_idx == N
+ group_idx[mask] = group_first[mask]
+ return group_idx
+
+
+def sample_and_group(npoint, radius, nsample, xyz, points, returnfps=False):
+ """
+ Input:
+ npoint:
+ radius:
+ nsample:
+ xyz: input points position data, [B, N, 3]
+ points: input points data, [B, N, D]
+ Return:
+ new_xyz: sampled points position data, [B, npoint, nsample, 3]
+ new_points: sampled points data, [B, npoint, nsample, 3+D]
+ """
+ B, N, C = xyz.shape
+ S = npoint
+ fps_idx = farthest_point_sample(xyz, npoint) # [B, npoint, C]
+ new_xyz = index_points(xyz, fps_idx)
+ idx = query_ball_point(radius, nsample, xyz, new_xyz)
+ grouped_xyz = index_points(xyz, idx) # [B, npoint, nsample, C]
+ grouped_xyz_norm = grouped_xyz - new_xyz.view(B, S, 1, C)
+
+ if points is not None:
+ grouped_points = index_points(points, idx)
+ new_points = torch.cat([grouped_xyz_norm, grouped_points], dim=-1) # [B, npoint, nsample, C+D]
+ else:
+ new_points = grouped_xyz_norm
+ if returnfps:
+ return new_xyz, new_points, grouped_xyz, fps_idx
+ else:
+ return new_xyz, new_points
+
+
+def sample_and_group_all(xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, N, 3]
+ points: input points data, [B, N, D]
+ Return:
+ new_xyz: sampled points position data, [B, 1, 3]
+ new_points: sampled points data, [B, 1, N, 3+D]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ new_xyz = torch.zeros(B, 1, C).to(device)
+ grouped_xyz = xyz.view(B, 1, N, C)
+ if points is not None:
+ new_points = torch.cat([grouped_xyz, points.view(B, 1, N, -1)], dim=-1)
+ else:
+ new_points = grouped_xyz
+ return new_xyz, new_points
+
+
+class PointNetSetAbstraction(nn.Module):
+ def __init__(self, npoint, radius, nsample, in_channel, mlp, group_all):
+ super(PointNetSetAbstraction, self).__init__()
+ self.npoint = npoint
+ self.radius = radius
+ self.nsample = nsample
+ self.mlp_convs = nn.ModuleList()
+ self.mlp_bns = nn.ModuleList()
+ last_channel = in_channel
+ for out_channel in mlp:
+ self.mlp_convs.append(nn.Conv2d(last_channel, out_channel, 1))
+ self.mlp_bns.append(nn.BatchNorm2d(out_channel))
+ last_channel = out_channel
+ self.group_all = group_all
+
+ def forward(self, xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, C, N]
+ points: input points data, [B, D, N]
+ Return:
+ new_xyz: sampled points position data, [B, C, S]
+ new_points_concat: sample points feature data, [B, D', S]
+ """
+ xyz = xyz.permute(0, 2, 1)
+ if points is not None:
+ points = points.permute(0, 2, 1)
+
+ if self.group_all:
+ new_xyz, new_points = sample_and_group_all(xyz, points)
+ else:
+ new_xyz, new_points = sample_and_group(self.npoint, self.radius, self.nsample, xyz, points)
+ # new_xyz: sampled points position data, [B, npoint, C]
+ # new_points: sampled points data, [B, npoint, nsample, C+D]
+ new_points = new_points.permute(0, 3, 2, 1) # [B, C+D, nsample,npoint]
+ for i, conv in enumerate(self.mlp_convs):
+ bn = self.mlp_bns[i]
+ new_points = F.relu(bn(conv(new_points)))
+
+ new_points = torch.max(new_points, 2)[0]
+ new_xyz = new_xyz.permute(0, 2, 1)
+ return new_xyz, new_points
+
+
+class PointNetSetAbstractionMsg(nn.Module):
+ def __init__(self, npoint, radius_list, nsample_list, in_channel, mlp_list):
+ super(PointNetSetAbstractionMsg, self).__init__()
+ self.npoint = npoint
+ self.radius_list = radius_list
+ self.nsample_list = nsample_list
+ self.conv_blocks = nn.ModuleList()
+ self.bn_blocks = nn.ModuleList()
+ for i in range(len(mlp_list)):
+ convs = nn.ModuleList()
+ bns = nn.ModuleList()
+ last_channel = in_channel + 3
+ for out_channel in mlp_list[i]:
+ convs.append(nn.Conv2d(last_channel, out_channel, 1))
+ bns.append(nn.BatchNorm2d(out_channel))
+ last_channel = out_channel
+ self.conv_blocks.append(convs)
+ self.bn_blocks.append(bns)
+
+ def forward(self, xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, C, N]
+ points: input points data, [B, D, N]
+ Return:
+ new_xyz: sampled points position data, [B, C, S]
+ new_points_concat: sample points feature data, [B, D', S]
+ """
+ xyz = xyz.permute(0, 2, 1)
+ if points is not None:
+ points = points.permute(0, 2, 1)
+
+ B, N, C = xyz.shape
+ S = self.npoint
+ new_xyz = index_points(xyz, farthest_point_sample(xyz, S))
+ new_points_list = []
+ for i, radius in enumerate(self.radius_list):
+ K = self.nsample_list[i]
+ group_idx = query_ball_point(radius, K, xyz, new_xyz)
+ grouped_xyz = index_points(xyz, group_idx)
+ grouped_xyz -= new_xyz.view(B, S, 1, C)
+ if points is not None:
+ grouped_points = index_points(points, group_idx)
+ grouped_points = torch.cat([grouped_points, grouped_xyz], dim=-1)
+ else:
+ grouped_points = grouped_xyz
+
+ grouped_points = grouped_points.permute(0, 3, 2, 1) # [B, D, K, S]
+ for j in range(len(self.conv_blocks[i])):
+ conv = self.conv_blocks[i][j]
+ bn = self.bn_blocks[i][j]
+ grouped_points = F.relu(bn(conv(grouped_points)))
+ new_points = torch.max(grouped_points, 2)[0] # [B, D', S]
+ new_points_list.append(new_points)
+
+ new_xyz = new_xyz.permute(0, 2, 1)
+ new_points_concat = torch.cat(new_points_list, dim=1)
+ return new_xyz, new_points_concat
+
+
+class PointNetFeaturePropagation(nn.Module):
+ def __init__(self, in_channel, mlp):
+ super(PointNetFeaturePropagation, self).__init__()
+ self.mlp_convs = nn.ModuleList()
+ self.mlp_bns = nn.ModuleList()
+ last_channel = in_channel
+ for out_channel in mlp:
+ self.mlp_convs.append(nn.Conv1d(last_channel, out_channel, 1))
+ self.mlp_bns.append(nn.BatchNorm1d(out_channel))
+ last_channel = out_channel
+
+ def forward(self, xyz1, xyz2, points1, points2):
+ """
+ Input:
+ xyz1: input points position data, [B, C, N]
+ xyz2: sampled input points position data, [B, C, S]
+ points1: input points data, [B, D, N]
+ points2: input points data, [B, D, S]
+ Return:
+ new_points: upsampled points data, [B, D', N]
+ """
+ xyz1 = xyz1.permute(0, 2, 1)
+ xyz2 = xyz2.permute(0, 2, 1)
+
+ points2 = points2.permute(0, 2, 1)
+ B, N, C = xyz1.shape
+ _, S, _ = xyz2.shape
+
+ if S == 1:
+ interpolated_points = points2.repeat(1, N, 1)
+ else:
+ dists = square_distance(xyz1, xyz2)
+ dists, idx = dists.sort(dim=-1)
+ dists, idx = dists[:, :, :3], idx[:, :, :3] # [B, N, 3]
+
+ dist_recip = 1.0 / (dists + 1e-8)
+ norm = torch.sum(dist_recip, dim=2, keepdim=True)
+ weight = dist_recip / norm
+ interpolated_points = torch.sum(index_points(points2, idx) * weight.view(B, N, 3, 1), dim=2)
+
+ if points1 is not None:
+ points1 = points1.permute(0, 2, 1)
+ new_points = torch.cat([points1, interpolated_points], dim=-1)
+ else:
+ new_points = interpolated_points
+
+ new_points = new_points.permute(0, 2, 1)
+ for i, conv in enumerate(self.mlp_convs):
+ bn = self.mlp_bns[i]
+ new_points = F.relu(bn(conv(new_points)))
+ return new_points
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
new file mode 100644
index 0000000000..1c98bf10d7
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/part_seg/pointnet2_part_seg_msg/logs/pointnet2_part_seg_msg.txt b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/part_seg/pointnet2_part_seg_msg/logs/pointnet2_part_seg_msg.txt
new file mode 100644
index 0000000000..c095a78372
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/part_seg/pointnet2_part_seg_msg/logs/pointnet2_part_seg_msg.txt
@@ -0,0 +1,2581 @@
+2021-03-26 20:53:24,463 - Model - INFO - PARAMETER ...
+2021-03-26 20:53:24,463 - Model - INFO - Namespace(batch_size=16, decay_rate=0.0001, epoch=251, gpu='1', learning_rate=0.001, log_dir='pointnet2_part_seg_msg', lr_decay=0.5, model='pointnet2_part_seg_msg', normal=True, npoint=2048, optimizer='Adam', step_size=20)
+2021-03-26 20:53:24,908 - Model - INFO - The number of training data is: 13995
+2021-03-26 20:53:24,909 - Model - INFO - The number of test data is: 2874
+2021-03-26 20:53:45,536 - Model - INFO - No existing model, starting training from scratch...
+2021-03-26 20:53:45,540 - Model - INFO - Epoch 1 (1/251):
+2021-03-26 20:53:45,540 - Model - INFO - Learning rate:0.001000
+2021-03-26 21:00:28,956 - Model - INFO - Train accuracy is: 0.85584
+2021-03-26 21:01:38,416 - Model - INFO - eval mIoU of Airplane 0.740637
+2021-03-26 21:01:38,416 - Model - INFO - eval mIoU of Bag 0.447475
+2021-03-26 21:01:38,416 - Model - INFO - eval mIoU of Cap 0.686188
+2021-03-26 21:01:38,416 - Model - INFO - eval mIoU of Car 0.606154
+2021-03-26 21:01:38,416 - Model - INFO - eval mIoU of Chair 0.880540
+2021-03-26 21:01:38,416 - Model - INFO - eval mIoU of Earphone 0.618179
+2021-03-26 21:01:38,416 - Model - INFO - eval mIoU of Guitar 0.873399
+2021-03-26 21:01:38,416 - Model - INFO - eval mIoU of Knife 0.766975
+2021-03-26 21:01:38,416 - Model - INFO - eval mIoU of Lamp 0.736069
+2021-03-26 21:01:38,416 - Model - INFO - eval mIoU of Laptop 0.949603
+2021-03-26 21:01:38,416 - Model - INFO - eval mIoU of Motorbike 0.233225
+2021-03-26 21:01:38,416 - Model - INFO - eval mIoU of Mug 0.485859
+2021-03-26 21:01:38,416 - Model - INFO - eval mIoU of Pistol 0.577962
+2021-03-26 21:01:38,416 - Model - INFO - eval mIoU of Rocket 0.225274
+2021-03-26 21:01:38,416 - Model - INFO - eval mIoU of Skateboard 0.612827
+2021-03-26 21:01:38,416 - Model - INFO - eval mIoU of Table 0.761573
+2021-03-26 21:01:38,417 - Model - INFO - Epoch 1 test Accuracy: 0.907558 Class avg mIOU: 0.637621 Inctance avg mIOU: 0.766723
+2021-03-26 21:01:38,417 - Model - INFO - Save model...
+2021-03-26 21:01:38,417 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-26 21:01:38,487 - Model - INFO - Saving model....
+2021-03-26 21:01:38,487 - Model - INFO - Best accuracy is: 0.90756
+2021-03-26 21:01:38,487 - Model - INFO - Best class avg mIOU is: 0.63762
+2021-03-26 21:01:38,487 - Model - INFO - Best inctance avg mIOU is: 0.76672
+2021-03-26 21:01:38,487 - Model - INFO - Epoch 2 (2/251):
+2021-03-26 21:01:38,487 - Model - INFO - Learning rate:0.001000
+2021-03-26 21:08:20,792 - Model - INFO - Train accuracy is: 0.91860
+2021-03-26 21:09:26,340 - Model - INFO - eval mIoU of Airplane 0.738976
+2021-03-26 21:09:26,340 - Model - INFO - eval mIoU of Bag 0.673453
+2021-03-26 21:09:26,340 - Model - INFO - eval mIoU of Cap 0.803919
+2021-03-26 21:09:26,340 - Model - INFO - eval mIoU of Car 0.630208
+2021-03-26 21:09:26,340 - Model - INFO - eval mIoU of Chair 0.879868
+2021-03-26 21:09:26,340 - Model - INFO - eval mIoU of Earphone 0.706501
+2021-03-26 21:09:26,340 - Model - INFO - eval mIoU of Guitar 0.890402
+2021-03-26 21:09:26,341 - Model - INFO - eval mIoU of Knife 0.832821
+2021-03-26 21:09:26,341 - Model - INFO - eval mIoU of Lamp 0.760572
+2021-03-26 21:09:26,341 - Model - INFO - eval mIoU of Laptop 0.943843
+2021-03-26 21:09:26,341 - Model - INFO - eval mIoU of Motorbike 0.349938
+2021-03-26 21:09:26,341 - Model - INFO - eval mIoU of Mug 0.867917
+2021-03-26 21:09:26,341 - Model - INFO - eval mIoU of Pistol 0.606346
+2021-03-26 21:09:26,341 - Model - INFO - eval mIoU of Rocket 0.395416
+2021-03-26 21:09:26,341 - Model - INFO - eval mIoU of Skateboard 0.527164
+2021-03-26 21:09:26,341 - Model - INFO - eval mIoU of Table 0.798423
+2021-03-26 21:09:26,341 - Model - INFO - Epoch 2 test Accuracy: 0.917032 Class avg mIOU: 0.712860 Inctance avg mIOU: 0.792927
+2021-03-26 21:09:26,341 - Model - INFO - Save model...
+2021-03-26 21:09:26,341 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-26 21:09:26,427 - Model - INFO - Saving model....
+2021-03-26 21:09:26,427 - Model - INFO - Best accuracy is: 0.91703
+2021-03-26 21:09:26,427 - Model - INFO - Best class avg mIOU is: 0.71286
+2021-03-26 21:09:26,427 - Model - INFO - Best inctance avg mIOU is: 0.79293
+2021-03-26 21:09:26,427 - Model - INFO - Epoch 3 (3/251):
+2021-03-26 21:09:26,427 - Model - INFO - Learning rate:0.001000
+2021-03-26 21:16:10,713 - Model - INFO - Train accuracy is: 0.92539
+2021-03-26 21:17:15,482 - Model - INFO - eval mIoU of Airplane 0.778591
+2021-03-26 21:17:15,482 - Model - INFO - eval mIoU of Bag 0.641186
+2021-03-26 21:17:15,482 - Model - INFO - eval mIoU of Cap 0.841365
+2021-03-26 21:17:15,482 - Model - INFO - eval mIoU of Car 0.692556
+2021-03-26 21:17:15,482 - Model - INFO - eval mIoU of Chair 0.888945
+2021-03-26 21:17:15,482 - Model - INFO - eval mIoU of Earphone 0.696372
+2021-03-26 21:17:15,482 - Model - INFO - eval mIoU of Guitar 0.881375
+2021-03-26 21:17:15,482 - Model - INFO - eval mIoU of Knife 0.846422
+2021-03-26 21:17:15,483 - Model - INFO - eval mIoU of Lamp 0.787109
+2021-03-26 21:17:15,483 - Model - INFO - eval mIoU of Laptop 0.935915
+2021-03-26 21:17:15,483 - Model - INFO - eval mIoU of Motorbike 0.447775
+2021-03-26 21:17:15,483 - Model - INFO - eval mIoU of Mug 0.904444
+2021-03-26 21:17:15,483 - Model - INFO - eval mIoU of Pistol 0.753458
+2021-03-26 21:17:15,483 - Model - INFO - eval mIoU of Rocket 0.276124
+2021-03-26 21:17:15,483 - Model - INFO - eval mIoU of Skateboard 0.683700
+2021-03-26 21:17:15,483 - Model - INFO - eval mIoU of Table 0.779553
+2021-03-26 21:17:15,483 - Model - INFO - Epoch 3 test Accuracy: 0.926109 Class avg mIOU: 0.739681 Inctance avg mIOU: 0.805600
+2021-03-26 21:17:15,484 - Model - INFO - Save model...
+2021-03-26 21:17:15,484 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-26 21:17:15,573 - Model - INFO - Saving model....
+2021-03-26 21:17:15,574 - Model - INFO - Best accuracy is: 0.92611
+2021-03-26 21:17:15,574 - Model - INFO - Best class avg mIOU is: 0.73968
+2021-03-26 21:17:15,574 - Model - INFO - Best inctance avg mIOU is: 0.80560
+2021-03-26 21:17:15,574 - Model - INFO - Epoch 4 (4/251):
+2021-03-26 21:17:15,574 - Model - INFO - Learning rate:0.001000
+2021-03-26 21:24:02,350 - Model - INFO - Train accuracy is: 0.92772
+2021-03-26 21:25:10,570 - Model - INFO - eval mIoU of Airplane 0.763583
+2021-03-26 21:25:10,570 - Model - INFO - eval mIoU of Bag 0.768957
+2021-03-26 21:25:10,570 - Model - INFO - eval mIoU of Cap 0.768901
+2021-03-26 21:25:10,571 - Model - INFO - eval mIoU of Car 0.686037
+2021-03-26 21:25:10,571 - Model - INFO - eval mIoU of Chair 0.886481
+2021-03-26 21:25:10,571 - Model - INFO - eval mIoU of Earphone 0.630729
+2021-03-26 21:25:10,571 - Model - INFO - eval mIoU of Guitar 0.886434
+2021-03-26 21:25:10,571 - Model - INFO - eval mIoU of Knife 0.792723
+2021-03-26 21:25:10,571 - Model - INFO - eval mIoU of Lamp 0.784850
+2021-03-26 21:25:10,571 - Model - INFO - eval mIoU of Laptop 0.946401
+2021-03-26 21:25:10,571 - Model - INFO - eval mIoU of Motorbike 0.459209
+2021-03-26 21:25:10,571 - Model - INFO - eval mIoU of Mug 0.866618
+2021-03-26 21:25:10,571 - Model - INFO - eval mIoU of Pistol 0.782040
+2021-03-26 21:25:10,571 - Model - INFO - eval mIoU of Rocket 0.388919
+2021-03-26 21:25:10,571 - Model - INFO - eval mIoU of Skateboard 0.705302
+2021-03-26 21:25:10,571 - Model - INFO - eval mIoU of Table 0.802097
+2021-03-26 21:25:10,572 - Model - INFO - Epoch 4 test Accuracy: 0.922030 Class avg mIOU: 0.744955 Inctance avg mIOU: 0.809242
+2021-03-26 21:25:10,572 - Model - INFO - Save model...
+2021-03-26 21:25:10,572 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-26 21:25:10,666 - Model - INFO - Saving model....
+2021-03-26 21:25:10,667 - Model - INFO - Best accuracy is: 0.92611
+2021-03-26 21:25:10,667 - Model - INFO - Best class avg mIOU is: 0.74495
+2021-03-26 21:25:10,667 - Model - INFO - Best inctance avg mIOU is: 0.80924
+2021-03-26 21:25:10,667 - Model - INFO - Epoch 5 (5/251):
+2021-03-26 21:25:10,667 - Model - INFO - Learning rate:0.001000
+2021-03-26 21:31:54,967 - Model - INFO - Train accuracy is: 0.92971
+2021-03-26 21:33:03,202 - Model - INFO - eval mIoU of Airplane 0.782533
+2021-03-26 21:33:03,202 - Model - INFO - eval mIoU of Bag 0.465270
+2021-03-26 21:33:03,202 - Model - INFO - eval mIoU of Cap 0.808007
+2021-03-26 21:33:03,202 - Model - INFO - eval mIoU of Car 0.717015
+2021-03-26 21:33:03,203 - Model - INFO - eval mIoU of Chair 0.882974
+2021-03-26 21:33:03,203 - Model - INFO - eval mIoU of Earphone 0.629165
+2021-03-26 21:33:03,203 - Model - INFO - eval mIoU of Guitar 0.877558
+2021-03-26 21:33:03,203 - Model - INFO - eval mIoU of Knife 0.848072
+2021-03-26 21:33:03,203 - Model - INFO - eval mIoU of Lamp 0.798058
+2021-03-26 21:33:03,203 - Model - INFO - eval mIoU of Laptop 0.952391
+2021-03-26 21:33:03,203 - Model - INFO - eval mIoU of Motorbike 0.479104
+2021-03-26 21:33:03,203 - Model - INFO - eval mIoU of Mug 0.929549
+2021-03-26 21:33:03,203 - Model - INFO - eval mIoU of Pistol 0.778823
+2021-03-26 21:33:03,203 - Model - INFO - eval mIoU of Rocket 0.566701
+2021-03-26 21:33:03,203 - Model - INFO - eval mIoU of Skateboard 0.683715
+2021-03-26 21:33:03,203 - Model - INFO - eval mIoU of Table 0.797272
+2021-03-26 21:33:03,204 - Model - INFO - Epoch 5 test Accuracy: 0.928084 Class avg mIOU: 0.749763 Inctance avg mIOU: 0.813756
+2021-03-26 21:33:03,204 - Model - INFO - Save model...
+2021-03-26 21:33:03,204 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-26 21:33:03,307 - Model - INFO - Saving model....
+2021-03-26 21:33:03,307 - Model - INFO - Best accuracy is: 0.92808
+2021-03-26 21:33:03,308 - Model - INFO - Best class avg mIOU is: 0.74976
+2021-03-26 21:33:03,308 - Model - INFO - Best inctance avg mIOU is: 0.81376
+2021-03-26 21:33:03,308 - Model - INFO - Epoch 6 (6/251):
+2021-03-26 21:33:03,308 - Model - INFO - Learning rate:0.001000
+2021-03-26 21:39:48,221 - Model - INFO - Train accuracy is: 0.93100
+2021-03-26 21:40:58,313 - Model - INFO - eval mIoU of Airplane 0.783413
+2021-03-26 21:40:58,313 - Model - INFO - eval mIoU of Bag 0.762083
+2021-03-26 21:40:58,313 - Model - INFO - eval mIoU of Cap 0.837606
+2021-03-26 21:40:58,313 - Model - INFO - eval mIoU of Car 0.692932
+2021-03-26 21:40:58,313 - Model - INFO - eval mIoU of Chair 0.893316
+2021-03-26 21:40:58,314 - Model - INFO - eval mIoU of Earphone 0.731681
+2021-03-26 21:40:58,314 - Model - INFO - eval mIoU of Guitar 0.897153
+2021-03-26 21:40:58,314 - Model - INFO - eval mIoU of Knife 0.827572
+2021-03-26 21:40:58,314 - Model - INFO - eval mIoU of Lamp 0.818571
+2021-03-26 21:40:58,314 - Model - INFO - eval mIoU of Laptop 0.947399
+2021-03-26 21:40:58,314 - Model - INFO - eval mIoU of Motorbike 0.455779
+2021-03-26 21:40:58,314 - Model - INFO - eval mIoU of Mug 0.929084
+2021-03-26 21:40:58,314 - Model - INFO - eval mIoU of Pistol 0.753836
+2021-03-26 21:40:58,314 - Model - INFO - eval mIoU of Rocket 0.406127
+2021-03-26 21:40:58,314 - Model - INFO - eval mIoU of Skateboard 0.727035
+2021-03-26 21:40:58,314 - Model - INFO - eval mIoU of Table 0.806986
+2021-03-26 21:40:58,314 - Model - INFO - Epoch 6 test Accuracy: 0.930875 Class avg mIOU: 0.766911 Inctance avg mIOU: 0.821399
+2021-03-26 21:40:58,314 - Model - INFO - Save model...
+2021-03-26 21:40:58,315 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-26 21:40:58,386 - Model - INFO - Saving model....
+2021-03-26 21:40:58,386 - Model - INFO - Best accuracy is: 0.93088
+2021-03-26 21:40:58,386 - Model - INFO - Best class avg mIOU is: 0.76691
+2021-03-26 21:40:58,386 - Model - INFO - Best inctance avg mIOU is: 0.82140
+2021-03-26 21:40:58,386 - Model - INFO - Epoch 7 (7/251):
+2021-03-26 21:40:58,386 - Model - INFO - Learning rate:0.001000
+2021-03-26 21:47:45,585 - Model - INFO - Train accuracy is: 0.93250
+2021-03-26 21:48:53,082 - Model - INFO - eval mIoU of Airplane 0.727965
+2021-03-26 21:48:53,082 - Model - INFO - eval mIoU of Bag 0.808763
+2021-03-26 21:48:53,083 - Model - INFO - eval mIoU of Cap 0.853021
+2021-03-26 21:48:53,083 - Model - INFO - eval mIoU of Car 0.711732
+2021-03-26 21:48:53,083 - Model - INFO - eval mIoU of Chair 0.877806
+2021-03-26 21:48:53,083 - Model - INFO - eval mIoU of Earphone 0.728329
+2021-03-26 21:48:53,083 - Model - INFO - eval mIoU of Guitar 0.900220
+2021-03-26 21:48:53,083 - Model - INFO - eval mIoU of Knife 0.828772
+2021-03-26 21:48:53,083 - Model - INFO - eval mIoU of Lamp 0.805551
+2021-03-26 21:48:53,083 - Model - INFO - eval mIoU of Laptop 0.955737
+2021-03-26 21:48:53,083 - Model - INFO - eval mIoU of Motorbike 0.258119
+2021-03-26 21:48:53,083 - Model - INFO - eval mIoU of Mug 0.919270
+2021-03-26 21:48:53,083 - Model - INFO - eval mIoU of Pistol 0.704939
+2021-03-26 21:48:53,083 - Model - INFO - eval mIoU of Rocket 0.480411
+2021-03-26 21:48:53,083 - Model - INFO - eval mIoU of Skateboard 0.690808
+2021-03-26 21:48:53,084 - Model - INFO - eval mIoU of Table 0.802941
+2021-03-26 21:48:53,084 - Model - INFO - Epoch 7 test Accuracy: 0.913710 Class avg mIOU: 0.753399 Inctance avg mIOU: 0.805813
+2021-03-26 21:48:53,084 - Model - INFO - Best accuracy is: 0.93088
+2021-03-26 21:48:53,084 - Model - INFO - Best class avg mIOU is: 0.76691
+2021-03-26 21:48:53,084 - Model - INFO - Best inctance avg mIOU is: 0.82140
+2021-03-26 21:48:53,084 - Model - INFO - Epoch 8 (8/251):
+2021-03-26 21:48:53,085 - Model - INFO - Learning rate:0.001000
+2021-03-26 21:55:31,879 - Model - INFO - Train accuracy is: 0.93367
+2021-03-26 21:56:38,413 - Model - INFO - eval mIoU of Airplane 0.759942
+2021-03-26 21:56:38,413 - Model - INFO - eval mIoU of Bag 0.603996
+2021-03-26 21:56:38,413 - Model - INFO - eval mIoU of Cap 0.833260
+2021-03-26 21:56:38,413 - Model - INFO - eval mIoU of Car 0.729088
+2021-03-26 21:56:38,413 - Model - INFO - eval mIoU of Chair 0.890726
+2021-03-26 21:56:38,413 - Model - INFO - eval mIoU of Earphone 0.299873
+2021-03-26 21:56:38,414 - Model - INFO - eval mIoU of Guitar 0.890145
+2021-03-26 21:56:38,414 - Model - INFO - eval mIoU of Knife 0.811348
+2021-03-26 21:56:38,414 - Model - INFO - eval mIoU of Lamp 0.814826
+2021-03-26 21:56:38,414 - Model - INFO - eval mIoU of Laptop 0.953665
+2021-03-26 21:56:38,414 - Model - INFO - eval mIoU of Motorbike 0.312201
+2021-03-26 21:56:38,414 - Model - INFO - eval mIoU of Mug 0.922579
+2021-03-26 21:56:38,414 - Model - INFO - eval mIoU of Pistol 0.761945
+2021-03-26 21:56:38,414 - Model - INFO - eval mIoU of Rocket 0.422997
+2021-03-26 21:56:38,414 - Model - INFO - eval mIoU of Skateboard 0.706082
+2021-03-26 21:56:38,414 - Model - INFO - eval mIoU of Table 0.801482
+2021-03-26 21:56:38,414 - Model - INFO - Epoch 8 test Accuracy: 0.923675 Class avg mIOU: 0.719635 Inctance avg mIOU: 0.811757
+2021-03-26 21:56:38,415 - Model - INFO - Best accuracy is: 0.93088
+2021-03-26 21:56:38,415 - Model - INFO - Best class avg mIOU is: 0.76691
+2021-03-26 21:56:38,415 - Model - INFO - Best inctance avg mIOU is: 0.82140
+2021-03-26 21:56:38,415 - Model - INFO - Epoch 9 (9/251):
+2021-03-26 21:56:38,415 - Model - INFO - Learning rate:0.001000
+2021-03-26 22:03:25,945 - Model - INFO - Train accuracy is: 0.93319
+2021-03-26 22:04:34,910 - Model - INFO - eval mIoU of Airplane 0.758041
+2021-03-26 22:04:34,910 - Model - INFO - eval mIoU of Bag 0.564823
+2021-03-26 22:04:34,910 - Model - INFO - eval mIoU of Cap 0.831594
+2021-03-26 22:04:34,910 - Model - INFO - eval mIoU of Car 0.641035
+2021-03-26 22:04:34,910 - Model - INFO - eval mIoU of Chair 0.883484
+2021-03-26 22:04:34,910 - Model - INFO - eval mIoU of Earphone 0.469128
+2021-03-26 22:04:34,910 - Model - INFO - eval mIoU of Guitar 0.870854
+2021-03-26 22:04:34,910 - Model - INFO - eval mIoU of Knife 0.853204
+2021-03-26 22:04:34,910 - Model - INFO - eval mIoU of Lamp 0.816409
+2021-03-26 22:04:34,910 - Model - INFO - eval mIoU of Laptop 0.951851
+2021-03-26 22:04:34,911 - Model - INFO - eval mIoU of Motorbike 0.433620
+2021-03-26 22:04:34,911 - Model - INFO - eval mIoU of Mug 0.939111
+2021-03-26 22:04:34,911 - Model - INFO - eval mIoU of Pistol 0.708826
+2021-03-26 22:04:34,911 - Model - INFO - eval mIoU of Rocket 0.244396
+2021-03-26 22:04:34,911 - Model - INFO - eval mIoU of Skateboard 0.719199
+2021-03-26 22:04:34,911 - Model - INFO - eval mIoU of Table 0.787895
+2021-03-26 22:04:34,911 - Model - INFO - Epoch 9 test Accuracy: 0.920699 Class avg mIOU: 0.717092 Inctance avg mIOU: 0.802694
+2021-03-26 22:04:34,911 - Model - INFO - Best accuracy is: 0.93088
+2021-03-26 22:04:34,911 - Model - INFO - Best class avg mIOU is: 0.76691
+2021-03-26 22:04:34,912 - Model - INFO - Best inctance avg mIOU is: 0.82140
+2021-03-26 22:04:34,912 - Model - INFO - Epoch 10 (10/251):
+2021-03-26 22:04:34,912 - Model - INFO - Learning rate:0.001000
+2021-03-26 22:11:23,260 - Model - INFO - Train accuracy is: 0.93551
+2021-03-26 22:12:32,676 - Model - INFO - eval mIoU of Airplane 0.756522
+2021-03-26 22:12:32,676 - Model - INFO - eval mIoU of Bag 0.780778
+2021-03-26 22:12:32,676 - Model - INFO - eval mIoU of Cap 0.365345
+2021-03-26 22:12:32,676 - Model - INFO - eval mIoU of Car 0.618684
+2021-03-26 22:12:32,676 - Model - INFO - eval mIoU of Chair 0.887532
+2021-03-26 22:12:32,676 - Model - INFO - eval mIoU of Earphone 0.708560
+2021-03-26 22:12:32,677 - Model - INFO - eval mIoU of Guitar 0.896410
+2021-03-26 22:12:32,677 - Model - INFO - eval mIoU of Knife 0.854511
+2021-03-26 22:12:32,677 - Model - INFO - eval mIoU of Lamp 0.776362
+2021-03-26 22:12:32,677 - Model - INFO - eval mIoU of Laptop 0.950700
+2021-03-26 22:12:32,677 - Model - INFO - eval mIoU of Motorbike 0.377096
+2021-03-26 22:12:32,677 - Model - INFO - eval mIoU of Mug 0.928353
+2021-03-26 22:12:32,677 - Model - INFO - eval mIoU of Pistol 0.740682
+2021-03-26 22:12:32,677 - Model - INFO - eval mIoU of Rocket 0.280228
+2021-03-26 22:12:32,677 - Model - INFO - eval mIoU of Skateboard 0.510672
+2021-03-26 22:12:32,677 - Model - INFO - eval mIoU of Table 0.799471
+2021-03-26 22:12:32,678 - Model - INFO - Epoch 10 test Accuracy: 0.922848 Class avg mIOU: 0.701994 Inctance avg mIOU: 0.800800
+2021-03-26 22:12:32,678 - Model - INFO - Best accuracy is: 0.93088
+2021-03-26 22:12:32,678 - Model - INFO - Best class avg mIOU is: 0.76691
+2021-03-26 22:12:32,678 - Model - INFO - Best inctance avg mIOU is: 0.82140
+2021-03-26 22:12:32,678 - Model - INFO - Epoch 11 (11/251):
+2021-03-26 22:12:32,678 - Model - INFO - Learning rate:0.001000
+2021-03-26 22:19:13,650 - Model - INFO - Train accuracy is: 0.93557
+2021-03-26 22:20:22,509 - Model - INFO - eval mIoU of Airplane 0.768121
+2021-03-26 22:20:22,509 - Model - INFO - eval mIoU of Bag 0.726198
+2021-03-26 22:20:22,510 - Model - INFO - eval mIoU of Cap 0.763957
+2021-03-26 22:20:22,510 - Model - INFO - eval mIoU of Car 0.732334
+2021-03-26 22:20:22,510 - Model - INFO - eval mIoU of Chair 0.893504
+2021-03-26 22:20:22,510 - Model - INFO - eval mIoU of Earphone 0.613784
+2021-03-26 22:20:22,510 - Model - INFO - eval mIoU of Guitar 0.903026
+2021-03-26 22:20:22,510 - Model - INFO - eval mIoU of Knife 0.846143
+2021-03-26 22:20:22,510 - Model - INFO - eval mIoU of Lamp 0.814799
+2021-03-26 22:20:22,510 - Model - INFO - eval mIoU of Laptop 0.946501
+2021-03-26 22:20:22,510 - Model - INFO - eval mIoU of Motorbike 0.495876
+2021-03-26 22:20:22,510 - Model - INFO - eval mIoU of Mug 0.903270
+2021-03-26 22:20:22,510 - Model - INFO - eval mIoU of Pistol 0.802752
+2021-03-26 22:20:22,510 - Model - INFO - eval mIoU of Rocket 0.560358
+2021-03-26 22:20:22,510 - Model - INFO - eval mIoU of Skateboard 0.729253
+2021-03-26 22:20:22,510 - Model - INFO - eval mIoU of Table 0.815755
+2021-03-26 22:20:22,511 - Model - INFO - Epoch 11 test Accuracy: 0.931766 Class avg mIOU: 0.769727 Inctance avg mIOU: 0.825581
+2021-03-26 22:20:22,511 - Model - INFO - Save model...
+2021-03-26 22:20:22,511 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-26 22:20:22,579 - Model - INFO - Saving model....
+2021-03-26 22:20:22,580 - Model - INFO - Best accuracy is: 0.93177
+2021-03-26 22:20:22,580 - Model - INFO - Best class avg mIOU is: 0.76973
+2021-03-26 22:20:22,580 - Model - INFO - Best inctance avg mIOU is: 0.82558
+2021-03-26 22:20:22,580 - Model - INFO - Epoch 12 (12/251):
+2021-03-26 22:20:22,580 - Model - INFO - Learning rate:0.001000
+2021-03-26 22:27:09,690 - Model - INFO - Train accuracy is: 0.93671
+2021-03-26 22:28:16,335 - Model - INFO - eval mIoU of Airplane 0.783527
+2021-03-26 22:28:16,336 - Model - INFO - eval mIoU of Bag 0.805258
+2021-03-26 22:28:16,336 - Model - INFO - eval mIoU of Cap 0.772463
+2021-03-26 22:28:16,336 - Model - INFO - eval mIoU of Car 0.741072
+2021-03-26 22:28:16,336 - Model - INFO - eval mIoU of Chair 0.897871
+2021-03-26 22:28:16,336 - Model - INFO - eval mIoU of Earphone 0.740139
+2021-03-26 22:28:16,336 - Model - INFO - eval mIoU of Guitar 0.895531
+2021-03-26 22:28:16,336 - Model - INFO - eval mIoU of Knife 0.841313
+2021-03-26 22:28:16,336 - Model - INFO - eval mIoU of Lamp 0.809092
+2021-03-26 22:28:16,336 - Model - INFO - eval mIoU of Laptop 0.943191
+2021-03-26 22:28:16,336 - Model - INFO - eval mIoU of Motorbike 0.550725
+2021-03-26 22:28:16,336 - Model - INFO - eval mIoU of Mug 0.934728
+2021-03-26 22:28:16,336 - Model - INFO - eval mIoU of Pistol 0.802751
+2021-03-26 22:28:16,336 - Model - INFO - eval mIoU of Rocket 0.588875
+2021-03-26 22:28:16,336 - Model - INFO - eval mIoU of Skateboard 0.678148
+2021-03-26 22:28:16,336 - Model - INFO - eval mIoU of Table 0.815644
+2021-03-26 22:28:16,336 - Model - INFO - Epoch 12 test Accuracy: 0.934027 Class avg mIOU: 0.787520 Inctance avg mIOU: 0.829704
+2021-03-26 22:28:16,336 - Model - INFO - Save model...
+2021-03-26 22:28:16,337 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-26 22:28:16,803 - Model - INFO - Saving model....
+2021-03-26 22:28:16,803 - Model - INFO - Best accuracy is: 0.93403
+2021-03-26 22:28:16,803 - Model - INFO - Best class avg mIOU is: 0.78752
+2021-03-26 22:28:16,803 - Model - INFO - Best inctance avg mIOU is: 0.82970
+2021-03-26 22:28:16,803 - Model - INFO - Epoch 13 (13/251):
+2021-03-26 22:28:16,803 - Model - INFO - Learning rate:0.001000
+2021-03-26 22:35:01,172 - Model - INFO - Train accuracy is: 0.93606
+2021-03-26 22:36:06,970 - Model - INFO - eval mIoU of Airplane 0.799641
+2021-03-26 22:36:06,970 - Model - INFO - eval mIoU of Bag 0.815529
+2021-03-26 22:36:06,970 - Model - INFO - eval mIoU of Cap 0.814104
+2021-03-26 22:36:06,970 - Model - INFO - eval mIoU of Car 0.701300
+2021-03-26 22:36:06,970 - Model - INFO - eval mIoU of Chair 0.884008
+2021-03-26 22:36:06,970 - Model - INFO - eval mIoU of Earphone 0.774564
+2021-03-26 22:36:06,971 - Model - INFO - eval mIoU of Guitar 0.893939
+2021-03-26 22:36:06,971 - Model - INFO - eval mIoU of Knife 0.846472
+2021-03-26 22:36:06,971 - Model - INFO - eval mIoU of Lamp 0.820813
+2021-03-26 22:36:06,971 - Model - INFO - eval mIoU of Laptop 0.954320
+2021-03-26 22:36:06,971 - Model - INFO - eval mIoU of Motorbike 0.521525
+2021-03-26 22:36:06,971 - Model - INFO - eval mIoU of Mug 0.934844
+2021-03-26 22:36:06,971 - Model - INFO - eval mIoU of Pistol 0.753472
+2021-03-26 22:36:06,971 - Model - INFO - eval mIoU of Rocket 0.556585
+2021-03-26 22:36:06,971 - Model - INFO - eval mIoU of Skateboard 0.751075
+2021-03-26 22:36:06,971 - Model - INFO - eval mIoU of Table 0.808956
+2021-03-26 22:36:06,971 - Model - INFO - Epoch 13 test Accuracy: 0.934363 Class avg mIOU: 0.789447 Inctance avg mIOU: 0.825361
+2021-03-26 22:36:06,971 - Model - INFO - Best accuracy is: 0.93436
+2021-03-26 22:36:06,972 - Model - INFO - Best class avg mIOU is: 0.78945
+2021-03-26 22:36:06,972 - Model - INFO - Best inctance avg mIOU is: 0.82970
+2021-03-26 22:36:06,972 - Model - INFO - Epoch 14 (14/251):
+2021-03-26 22:36:06,972 - Model - INFO - Learning rate:0.001000
+2021-03-26 22:42:51,182 - Model - INFO - Train accuracy is: 0.93737
+2021-03-26 22:43:59,963 - Model - INFO - eval mIoU of Airplane 0.775063
+2021-03-26 22:43:59,963 - Model - INFO - eval mIoU of Bag 0.775240
+2021-03-26 22:43:59,963 - Model - INFO - eval mIoU of Cap 0.789992
+2021-03-26 22:43:59,963 - Model - INFO - eval mIoU of Car 0.724570
+2021-03-26 22:43:59,963 - Model - INFO - eval mIoU of Chair 0.891457
+2021-03-26 22:43:59,963 - Model - INFO - eval mIoU of Earphone 0.713798
+2021-03-26 22:43:59,963 - Model - INFO - eval mIoU of Guitar 0.895709
+2021-03-26 22:43:59,963 - Model - INFO - eval mIoU of Knife 0.845364
+2021-03-26 22:43:59,963 - Model - INFO - eval mIoU of Lamp 0.822234
+2021-03-26 22:43:59,963 - Model - INFO - eval mIoU of Laptop 0.953483
+2021-03-26 22:43:59,964 - Model - INFO - eval mIoU of Motorbike 0.542634
+2021-03-26 22:43:59,964 - Model - INFO - eval mIoU of Mug 0.930872
+2021-03-26 22:43:59,964 - Model - INFO - eval mIoU of Pistol 0.797331
+2021-03-26 22:43:59,964 - Model - INFO - eval mIoU of Rocket 0.594262
+2021-03-26 22:43:59,964 - Model - INFO - eval mIoU of Skateboard 0.679594
+2021-03-26 22:43:59,964 - Model - INFO - eval mIoU of Table 0.805904
+2021-03-26 22:43:59,964 - Model - INFO - Epoch 14 test Accuracy: 0.931214 Class avg mIOU: 0.783594 Inctance avg mIOU: 0.824628
+2021-03-26 22:43:59,964 - Model - INFO - Best accuracy is: 0.93436
+2021-03-26 22:43:59,964 - Model - INFO - Best class avg mIOU is: 0.78945
+2021-03-26 22:43:59,964 - Model - INFO - Best inctance avg mIOU is: 0.82970
+2021-03-26 22:43:59,964 - Model - INFO - Epoch 15 (15/251):
+2021-03-26 22:43:59,964 - Model - INFO - Learning rate:0.001000
+2021-03-26 22:50:43,507 - Model - INFO - Train accuracy is: 0.93729
+2021-03-26 22:51:50,809 - Model - INFO - eval mIoU of Airplane 0.770780
+2021-03-26 22:51:50,810 - Model - INFO - eval mIoU of Bag 0.799611
+2021-03-26 22:51:50,810 - Model - INFO - eval mIoU of Cap 0.738233
+2021-03-26 22:51:50,810 - Model - INFO - eval mIoU of Car 0.721983
+2021-03-26 22:51:50,810 - Model - INFO - eval mIoU of Chair 0.876529
+2021-03-26 22:51:50,810 - Model - INFO - eval mIoU of Earphone 0.786310
+2021-03-26 22:51:50,810 - Model - INFO - eval mIoU of Guitar 0.900352
+2021-03-26 22:51:50,810 - Model - INFO - eval mIoU of Knife 0.766432
+2021-03-26 22:51:50,811 - Model - INFO - eval mIoU of Lamp 0.835363
+2021-03-26 22:51:50,811 - Model - INFO - eval mIoU of Laptop 0.953719
+2021-03-26 22:51:50,811 - Model - INFO - eval mIoU of Motorbike 0.359383
+2021-03-26 22:51:50,811 - Model - INFO - eval mIoU of Mug 0.948168
+2021-03-26 22:51:50,811 - Model - INFO - eval mIoU of Pistol 0.783119
+2021-03-26 22:51:50,811 - Model - INFO - eval mIoU of Rocket 0.519037
+2021-03-26 22:51:50,811 - Model - INFO - eval mIoU of Skateboard 0.737894
+2021-03-26 22:51:50,811 - Model - INFO - eval mIoU of Table 0.816374
+2021-03-26 22:51:50,812 - Model - INFO - Epoch 15 test Accuracy: 0.929209 Class avg mIOU: 0.769581 Inctance avg mIOU: 0.820131
+2021-03-26 22:51:50,812 - Model - INFO - Best accuracy is: 0.93436
+2021-03-26 22:51:50,812 - Model - INFO - Best class avg mIOU is: 0.78945
+2021-03-26 22:51:50,812 - Model - INFO - Best inctance avg mIOU is: 0.82970
+2021-03-26 22:51:50,812 - Model - INFO - Epoch 16 (16/251):
+2021-03-26 22:51:50,812 - Model - INFO - Learning rate:0.001000
+2021-03-26 22:58:29,091 - Model - INFO - Train accuracy is: 0.93837
+2021-03-26 22:59:34,430 - Model - INFO - eval mIoU of Airplane 0.785821
+2021-03-26 22:59:34,431 - Model - INFO - eval mIoU of Bag 0.805895
+2021-03-26 22:59:34,431 - Model - INFO - eval mIoU of Cap 0.818290
+2021-03-26 22:59:34,431 - Model - INFO - eval mIoU of Car 0.726432
+2021-03-26 22:59:34,431 - Model - INFO - eval mIoU of Chair 0.879940
+2021-03-26 22:59:34,431 - Model - INFO - eval mIoU of Earphone 0.628770
+2021-03-26 22:59:34,431 - Model - INFO - eval mIoU of Guitar 0.896484
+2021-03-26 22:59:34,431 - Model - INFO - eval mIoU of Knife 0.855673
+2021-03-26 22:59:34,431 - Model - INFO - eval mIoU of Lamp 0.813760
+2021-03-26 22:59:34,431 - Model - INFO - eval mIoU of Laptop 0.949806
+2021-03-26 22:59:34,431 - Model - INFO - eval mIoU of Motorbike 0.484054
+2021-03-26 22:59:34,432 - Model - INFO - eval mIoU of Mug 0.947649
+2021-03-26 22:59:34,432 - Model - INFO - eval mIoU of Pistol 0.768556
+2021-03-26 22:59:34,432 - Model - INFO - eval mIoU of Rocket 0.470199
+2021-03-26 22:59:34,432 - Model - INFO - eval mIoU of Skateboard 0.692563
+2021-03-26 22:59:34,432 - Model - INFO - eval mIoU of Table 0.793099
+2021-03-26 22:59:34,432 - Model - INFO - Epoch 16 test Accuracy: 0.930539 Class avg mIOU: 0.769812 Inctance avg mIOU: 0.816995
+2021-03-26 22:59:34,433 - Model - INFO - Best accuracy is: 0.93436
+2021-03-26 22:59:34,433 - Model - INFO - Best class avg mIOU is: 0.78945
+2021-03-26 22:59:34,433 - Model - INFO - Best inctance avg mIOU is: 0.82970
+2021-03-26 22:59:34,433 - Model - INFO - Epoch 17 (17/251):
+2021-03-26 22:59:34,433 - Model - INFO - Learning rate:0.001000
+2021-03-26 23:06:17,878 - Model - INFO - Train accuracy is: 0.93845
+2021-03-26 23:07:25,891 - Model - INFO - eval mIoU of Airplane 0.815051
+2021-03-26 23:07:25,892 - Model - INFO - eval mIoU of Bag 0.725564
+2021-03-26 23:07:25,892 - Model - INFO - eval mIoU of Cap 0.699182
+2021-03-26 23:07:25,892 - Model - INFO - eval mIoU of Car 0.743802
+2021-03-26 23:07:25,892 - Model - INFO - eval mIoU of Chair 0.894612
+2021-03-26 23:07:25,892 - Model - INFO - eval mIoU of Earphone 0.684550
+2021-03-26 23:07:25,892 - Model - INFO - eval mIoU of Guitar 0.898045
+2021-03-26 23:07:25,892 - Model - INFO - eval mIoU of Knife 0.862556
+2021-03-26 23:07:25,892 - Model - INFO - eval mIoU of Lamp 0.776021
+2021-03-26 23:07:25,892 - Model - INFO - eval mIoU of Laptop 0.947295
+2021-03-26 23:07:25,892 - Model - INFO - eval mIoU of Motorbike 0.562373
+2021-03-26 23:07:25,892 - Model - INFO - eval mIoU of Mug 0.939629
+2021-03-26 23:07:25,892 - Model - INFO - eval mIoU of Pistol 0.798981
+2021-03-26 23:07:25,892 - Model - INFO - eval mIoU of Rocket 0.322330
+2021-03-26 23:07:25,892 - Model - INFO - eval mIoU of Skateboard 0.725578
+2021-03-26 23:07:25,892 - Model - INFO - eval mIoU of Table 0.803527
+2021-03-26 23:07:25,893 - Model - INFO - Epoch 17 test Accuracy: 0.932389 Class avg mIOU: 0.762444 Inctance avg mIOU: 0.825451
+2021-03-26 23:07:25,893 - Model - INFO - Best accuracy is: 0.93436
+2021-03-26 23:07:25,893 - Model - INFO - Best class avg mIOU is: 0.78945
+2021-03-26 23:07:25,893 - Model - INFO - Best inctance avg mIOU is: 0.82970
+2021-03-26 23:07:25,893 - Model - INFO - Epoch 18 (18/251):
+2021-03-26 23:07:25,893 - Model - INFO - Learning rate:0.001000
+2021-03-26 23:14:13,618 - Model - INFO - Train accuracy is: 0.93848
+2021-03-26 23:15:21,892 - Model - INFO - eval mIoU of Airplane 0.787873
+2021-03-26 23:15:21,892 - Model - INFO - eval mIoU of Bag 0.598382
+2021-03-26 23:15:21,892 - Model - INFO - eval mIoU of Cap 0.796432
+2021-03-26 23:15:21,892 - Model - INFO - eval mIoU of Car 0.737337
+2021-03-26 23:15:21,892 - Model - INFO - eval mIoU of Chair 0.896021
+2021-03-26 23:15:21,892 - Model - INFO - eval mIoU of Earphone 0.699564
+2021-03-26 23:15:21,892 - Model - INFO - eval mIoU of Guitar 0.901602
+2021-03-26 23:15:21,892 - Model - INFO - eval mIoU of Knife 0.862917
+2021-03-26 23:15:21,892 - Model - INFO - eval mIoU of Lamp 0.807616
+2021-03-26 23:15:21,892 - Model - INFO - eval mIoU of Laptop 0.932531
+2021-03-26 23:15:21,892 - Model - INFO - eval mIoU of Motorbike 0.587366
+2021-03-26 23:15:21,892 - Model - INFO - eval mIoU of Mug 0.935962
+2021-03-26 23:15:21,892 - Model - INFO - eval mIoU of Pistol 0.728815
+2021-03-26 23:15:21,892 - Model - INFO - eval mIoU of Rocket 0.372551
+2021-03-26 23:15:21,892 - Model - INFO - eval mIoU of Skateboard 0.748590
+2021-03-26 23:15:21,892 - Model - INFO - eval mIoU of Table 0.794507
+2021-03-26 23:15:21,893 - Model - INFO - Epoch 18 test Accuracy: 0.926745 Class avg mIOU: 0.761754 Inctance avg mIOU: 0.822084
+2021-03-26 23:15:21,893 - Model - INFO - Best accuracy is: 0.93436
+2021-03-26 23:15:21,893 - Model - INFO - Best class avg mIOU is: 0.78945
+2021-03-26 23:15:21,893 - Model - INFO - Best inctance avg mIOU is: 0.82970
+2021-03-26 23:15:21,893 - Model - INFO - Epoch 19 (19/251):
+2021-03-26 23:15:21,893 - Model - INFO - Learning rate:0.001000
+2021-03-26 23:22:06,505 - Model - INFO - Train accuracy is: 0.93917
+2021-03-26 23:23:15,808 - Model - INFO - eval mIoU of Airplane 0.799432
+2021-03-26 23:23:15,810 - Model - INFO - eval mIoU of Bag 0.726938
+2021-03-26 23:23:15,810 - Model - INFO - eval mIoU of Cap 0.800803
+2021-03-26 23:23:15,810 - Model - INFO - eval mIoU of Car 0.740908
+2021-03-26 23:23:15,810 - Model - INFO - eval mIoU of Chair 0.893899
+2021-03-26 23:23:15,811 - Model - INFO - eval mIoU of Earphone 0.754681
+2021-03-26 23:23:15,811 - Model - INFO - eval mIoU of Guitar 0.900357
+2021-03-26 23:23:15,811 - Model - INFO - eval mIoU of Knife 0.831073
+2021-03-26 23:23:15,811 - Model - INFO - eval mIoU of Lamp 0.804959
+2021-03-26 23:23:15,811 - Model - INFO - eval mIoU of Laptop 0.943171
+2021-03-26 23:23:15,811 - Model - INFO - eval mIoU of Motorbike 0.515594
+2021-03-26 23:23:15,811 - Model - INFO - eval mIoU of Mug 0.905291
+2021-03-26 23:23:15,811 - Model - INFO - eval mIoU of Pistol 0.747309
+2021-03-26 23:23:15,811 - Model - INFO - eval mIoU of Rocket 0.572461
+2021-03-26 23:23:15,811 - Model - INFO - eval mIoU of Skateboard 0.756786
+2021-03-26 23:23:15,811 - Model - INFO - eval mIoU of Table 0.803390
+2021-03-26 23:23:15,812 - Model - INFO - Epoch 19 test Accuracy: 0.933484 Class avg mIOU: 0.781066 Inctance avg mIOU: 0.825279
+2021-03-26 23:23:15,812 - Model - INFO - Best accuracy is: 0.93436
+2021-03-26 23:23:15,812 - Model - INFO - Best class avg mIOU is: 0.78945
+2021-03-26 23:23:15,812 - Model - INFO - Best inctance avg mIOU is: 0.82970
+2021-03-26 23:23:15,812 - Model - INFO - Epoch 20 (20/251):
+2021-03-26 23:23:15,812 - Model - INFO - Learning rate:0.001000
+2021-03-26 23:29:59,123 - Model - INFO - Train accuracy is: 0.93879
+2021-03-26 23:31:06,916 - Model - INFO - eval mIoU of Airplane 0.793724
+2021-03-26 23:31:06,917 - Model - INFO - eval mIoU of Bag 0.656263
+2021-03-26 23:31:06,917 - Model - INFO - eval mIoU of Cap 0.839310
+2021-03-26 23:31:06,917 - Model - INFO - eval mIoU of Car 0.740163
+2021-03-26 23:31:06,917 - Model - INFO - eval mIoU of Chair 0.892496
+2021-03-26 23:31:06,917 - Model - INFO - eval mIoU of Earphone 0.707246
+2021-03-26 23:31:06,917 - Model - INFO - eval mIoU of Guitar 0.894878
+2021-03-26 23:31:06,917 - Model - INFO - eval mIoU of Knife 0.867941
+2021-03-26 23:31:06,917 - Model - INFO - eval mIoU of Lamp 0.811777
+2021-03-26 23:31:06,917 - Model - INFO - eval mIoU of Laptop 0.951030
+2021-03-26 23:31:06,917 - Model - INFO - eval mIoU of Motorbike 0.600180
+2021-03-26 23:31:06,917 - Model - INFO - eval mIoU of Mug 0.950150
+2021-03-26 23:31:06,917 - Model - INFO - eval mIoU of Pistol 0.803672
+2021-03-26 23:31:06,917 - Model - INFO - eval mIoU of Rocket 0.617057
+2021-03-26 23:31:06,917 - Model - INFO - eval mIoU of Skateboard 0.751989
+2021-03-26 23:31:06,917 - Model - INFO - eval mIoU of Table 0.801097
+2021-03-26 23:31:06,918 - Model - INFO - Epoch 20 test Accuracy: 0.928658 Class avg mIOU: 0.792436 Inctance avg mIOU: 0.827833
+2021-03-26 23:31:06,918 - Model - INFO - Best accuracy is: 0.93436
+2021-03-26 23:31:06,918 - Model - INFO - Best class avg mIOU is: 0.79244
+2021-03-26 23:31:06,918 - Model - INFO - Best inctance avg mIOU is: 0.82970
+2021-03-26 23:31:06,918 - Model - INFO - Epoch 21 (21/251):
+2021-03-26 23:31:06,918 - Model - INFO - Learning rate:0.000500
+2021-03-26 23:37:42,295 - Model - INFO - Train accuracy is: 0.94339
+2021-03-26 23:38:50,075 - Model - INFO - eval mIoU of Airplane 0.816772
+2021-03-26 23:38:50,075 - Model - INFO - eval mIoU of Bag 0.800016
+2021-03-26 23:38:50,075 - Model - INFO - eval mIoU of Cap 0.799365
+2021-03-26 23:38:50,076 - Model - INFO - eval mIoU of Car 0.728389
+2021-03-26 23:38:50,076 - Model - INFO - eval mIoU of Chair 0.903250
+2021-03-26 23:38:50,076 - Model - INFO - eval mIoU of Earphone 0.741962
+2021-03-26 23:38:50,076 - Model - INFO - eval mIoU of Guitar 0.891621
+2021-03-26 23:38:50,076 - Model - INFO - eval mIoU of Knife 0.876657
+2021-03-26 23:38:50,076 - Model - INFO - eval mIoU of Lamp 0.833977
+2021-03-26 23:38:50,077 - Model - INFO - eval mIoU of Laptop 0.954860
+2021-03-26 23:38:50,077 - Model - INFO - eval mIoU of Motorbike 0.612545
+2021-03-26 23:38:50,077 - Model - INFO - eval mIoU of Mug 0.945229
+2021-03-26 23:38:50,077 - Model - INFO - eval mIoU of Pistol 0.776495
+2021-03-26 23:38:50,077 - Model - INFO - eval mIoU of Rocket 0.540424
+2021-03-26 23:38:50,077 - Model - INFO - eval mIoU of Skateboard 0.741806
+2021-03-26 23:38:50,078 - Model - INFO - eval mIoU of Table 0.821490
+2021-03-26 23:38:50,078 - Model - INFO - Epoch 21 test Accuracy: 0.939202 Class avg mIOU: 0.799054 Inctance avg mIOU: 0.840979
+2021-03-26 23:38:50,078 - Model - INFO - Save model...
+2021-03-26 23:38:50,078 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-26 23:38:50,449 - Model - INFO - Saving model....
+2021-03-26 23:38:50,449 - Model - INFO - Best accuracy is: 0.93920
+2021-03-26 23:38:50,449 - Model - INFO - Best class avg mIOU is: 0.79905
+2021-03-26 23:38:50,449 - Model - INFO - Best inctance avg mIOU is: 0.84098
+2021-03-26 23:38:50,449 - Model - INFO - Epoch 22 (22/251):
+2021-03-26 23:38:50,450 - Model - INFO - Learning rate:0.000500
+2021-03-26 23:45:38,386 - Model - INFO - Train accuracy is: 0.94457
+2021-03-26 23:46:47,697 - Model - INFO - eval mIoU of Airplane 0.811184
+2021-03-26 23:46:47,697 - Model - INFO - eval mIoU of Bag 0.803084
+2021-03-26 23:46:47,697 - Model - INFO - eval mIoU of Cap 0.786658
+2021-03-26 23:46:47,697 - Model - INFO - eval mIoU of Car 0.756798
+2021-03-26 23:46:47,697 - Model - INFO - eval mIoU of Chair 0.894291
+2021-03-26 23:46:47,697 - Model - INFO - eval mIoU of Earphone 0.718408
+2021-03-26 23:46:47,697 - Model - INFO - eval mIoU of Guitar 0.898260
+2021-03-26 23:46:47,697 - Model - INFO - eval mIoU of Knife 0.852688
+2021-03-26 23:46:47,697 - Model - INFO - eval mIoU of Lamp 0.830923
+2021-03-26 23:46:47,697 - Model - INFO - eval mIoU of Laptop 0.953216
+2021-03-26 23:46:47,698 - Model - INFO - eval mIoU of Motorbike 0.545679
+2021-03-26 23:46:47,698 - Model - INFO - eval mIoU of Mug 0.953748
+2021-03-26 23:46:47,698 - Model - INFO - eval mIoU of Pistol 0.767468
+2021-03-26 23:46:47,698 - Model - INFO - eval mIoU of Rocket 0.575884
+2021-03-26 23:46:47,698 - Model - INFO - eval mIoU of Skateboard 0.754324
+2021-03-26 23:46:47,698 - Model - INFO - eval mIoU of Table 0.812925
+2021-03-26 23:46:47,698 - Model - INFO - Epoch 22 test Accuracy: 0.935353 Class avg mIOU: 0.794721 Inctance avg mIOU: 0.835427
+2021-03-26 23:46:47,698 - Model - INFO - Best accuracy is: 0.93920
+2021-03-26 23:46:47,699 - Model - INFO - Best class avg mIOU is: 0.79905
+2021-03-26 23:46:47,699 - Model - INFO - Best inctance avg mIOU is: 0.84098
+2021-03-26 23:46:47,699 - Model - INFO - Epoch 23 (23/251):
+2021-03-26 23:46:47,699 - Model - INFO - Learning rate:0.000500
+2021-03-26 23:53:34,075 - Model - INFO - Train accuracy is: 0.94483
+2021-03-26 23:54:42,512 - Model - INFO - eval mIoU of Airplane 0.792302
+2021-03-26 23:54:42,513 - Model - INFO - eval mIoU of Bag 0.776117
+2021-03-26 23:54:42,513 - Model - INFO - eval mIoU of Cap 0.840242
+2021-03-26 23:54:42,513 - Model - INFO - eval mIoU of Car 0.756080
+2021-03-26 23:54:42,513 - Model - INFO - eval mIoU of Chair 0.899687
+2021-03-26 23:54:42,513 - Model - INFO - eval mIoU of Earphone 0.750259
+2021-03-26 23:54:42,513 - Model - INFO - eval mIoU of Guitar 0.907003
+2021-03-26 23:54:42,513 - Model - INFO - eval mIoU of Knife 0.853144
+2021-03-26 23:54:42,513 - Model - INFO - eval mIoU of Lamp 0.835754
+2021-03-26 23:54:42,513 - Model - INFO - eval mIoU of Laptop 0.953340
+2021-03-26 23:54:42,513 - Model - INFO - eval mIoU of Motorbike 0.633187
+2021-03-26 23:54:42,513 - Model - INFO - eval mIoU of Mug 0.949878
+2021-03-26 23:54:42,513 - Model - INFO - eval mIoU of Pistol 0.810809
+2021-03-26 23:54:42,514 - Model - INFO - eval mIoU of Rocket 0.577964
+2021-03-26 23:54:42,514 - Model - INFO - eval mIoU of Skateboard 0.753939
+2021-03-26 23:54:42,514 - Model - INFO - eval mIoU of Table 0.825621
+2021-03-26 23:54:42,514 - Model - INFO - Epoch 23 test Accuracy: 0.938473 Class avg mIOU: 0.807208 Inctance avg mIOU: 0.841595
+2021-03-26 23:54:42,514 - Model - INFO - Save model...
+2021-03-26 23:54:42,514 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-26 23:54:43,011 - Model - INFO - Saving model....
+2021-03-26 23:54:43,011 - Model - INFO - Best accuracy is: 0.93920
+2021-03-26 23:54:43,011 - Model - INFO - Best class avg mIOU is: 0.80721
+2021-03-26 23:54:43,011 - Model - INFO - Best inctance avg mIOU is: 0.84159
+2021-03-26 23:54:43,011 - Model - INFO - Epoch 24 (24/251):
+2021-03-26 23:54:43,011 - Model - INFO - Learning rate:0.000500
+2021-03-27 00:01:27,847 - Model - INFO - Train accuracy is: 0.94435
+2021-03-27 00:02:37,858 - Model - INFO - eval mIoU of Airplane 0.806904
+2021-03-27 00:02:37,859 - Model - INFO - eval mIoU of Bag 0.764344
+2021-03-27 00:02:37,859 - Model - INFO - eval mIoU of Cap 0.800139
+2021-03-27 00:02:37,859 - Model - INFO - eval mIoU of Car 0.736278
+2021-03-27 00:02:37,859 - Model - INFO - eval mIoU of Chair 0.896805
+2021-03-27 00:02:37,859 - Model - INFO - eval mIoU of Earphone 0.769565
+2021-03-27 00:02:37,859 - Model - INFO - eval mIoU of Guitar 0.904448
+2021-03-27 00:02:37,859 - Model - INFO - eval mIoU of Knife 0.860830
+2021-03-27 00:02:37,859 - Model - INFO - eval mIoU of Lamp 0.847030
+2021-03-27 00:02:37,859 - Model - INFO - eval mIoU of Laptop 0.956625
+2021-03-27 00:02:37,859 - Model - INFO - eval mIoU of Motorbike 0.570826
+2021-03-27 00:02:37,859 - Model - INFO - eval mIoU of Mug 0.940232
+2021-03-27 00:02:37,859 - Model - INFO - eval mIoU of Pistol 0.774724
+2021-03-27 00:02:37,859 - Model - INFO - eval mIoU of Rocket 0.607698
+2021-03-27 00:02:37,859 - Model - INFO - eval mIoU of Skateboard 0.757068
+2021-03-27 00:02:37,859 - Model - INFO - eval mIoU of Table 0.813970
+2021-03-27 00:02:37,860 - Model - INFO - Epoch 24 test Accuracy: 0.937872 Class avg mIOU: 0.800468 Inctance avg mIOU: 0.837639
+2021-03-27 00:02:37,860 - Model - INFO - Best accuracy is: 0.93920
+2021-03-27 00:02:37,860 - Model - INFO - Best class avg mIOU is: 0.80721
+2021-03-27 00:02:37,860 - Model - INFO - Best inctance avg mIOU is: 0.84159
+2021-03-27 00:02:37,860 - Model - INFO - Epoch 25 (25/251):
+2021-03-27 00:02:37,860 - Model - INFO - Learning rate:0.000500
+2021-03-27 00:09:24,717 - Model - INFO - Train accuracy is: 0.94469
+2021-03-27 00:10:31,637 - Model - INFO - eval mIoU of Airplane 0.804579
+2021-03-27 00:10:31,637 - Model - INFO - eval mIoU of Bag 0.806638
+2021-03-27 00:10:31,637 - Model - INFO - eval mIoU of Cap 0.822264
+2021-03-27 00:10:31,637 - Model - INFO - eval mIoU of Car 0.751994
+2021-03-27 00:10:31,637 - Model - INFO - eval mIoU of Chair 0.896107
+2021-03-27 00:10:31,637 - Model - INFO - eval mIoU of Earphone 0.763699
+2021-03-27 00:10:31,637 - Model - INFO - eval mIoU of Guitar 0.898710
+2021-03-27 00:10:31,637 - Model - INFO - eval mIoU of Knife 0.855353
+2021-03-27 00:10:31,637 - Model - INFO - eval mIoU of Lamp 0.842790
+2021-03-27 00:10:31,637 - Model - INFO - eval mIoU of Laptop 0.956830
+2021-03-27 00:10:31,638 - Model - INFO - eval mIoU of Motorbike 0.602590
+2021-03-27 00:10:31,638 - Model - INFO - eval mIoU of Mug 0.945149
+2021-03-27 00:10:31,638 - Model - INFO - eval mIoU of Pistol 0.770214
+2021-03-27 00:10:31,638 - Model - INFO - eval mIoU of Rocket 0.469070
+2021-03-27 00:10:31,638 - Model - INFO - eval mIoU of Skateboard 0.750551
+2021-03-27 00:10:31,638 - Model - INFO - eval mIoU of Table 0.819657
+2021-03-27 00:10:31,638 - Model - INFO - Epoch 25 test Accuracy: 0.938350 Class avg mIOU: 0.797262 Inctance avg mIOU: 0.839021
+2021-03-27 00:10:31,638 - Model - INFO - Best accuracy is: 0.93920
+2021-03-27 00:10:31,638 - Model - INFO - Best class avg mIOU is: 0.80721
+2021-03-27 00:10:31,638 - Model - INFO - Best inctance avg mIOU is: 0.84159
+2021-03-27 00:10:31,638 - Model - INFO - Epoch 26 (26/251):
+2021-03-27 00:10:31,639 - Model - INFO - Learning rate:0.000500
+2021-03-27 00:17:14,095 - Model - INFO - Train accuracy is: 0.94452
+2021-03-27 00:18:22,345 - Model - INFO - eval mIoU of Airplane 0.801226
+2021-03-27 00:18:22,345 - Model - INFO - eval mIoU of Bag 0.788146
+2021-03-27 00:18:22,345 - Model - INFO - eval mIoU of Cap 0.834765
+2021-03-27 00:18:22,345 - Model - INFO - eval mIoU of Car 0.725625
+2021-03-27 00:18:22,345 - Model - INFO - eval mIoU of Chair 0.899579
+2021-03-27 00:18:22,345 - Model - INFO - eval mIoU of Earphone 0.738476
+2021-03-27 00:18:22,345 - Model - INFO - eval mIoU of Guitar 0.900899
+2021-03-27 00:18:22,345 - Model - INFO - eval mIoU of Knife 0.833697
+2021-03-27 00:18:22,345 - Model - INFO - eval mIoU of Lamp 0.810980
+2021-03-27 00:18:22,346 - Model - INFO - eval mIoU of Laptop 0.955883
+2021-03-27 00:18:22,346 - Model - INFO - eval mIoU of Motorbike 0.624355
+2021-03-27 00:18:22,346 - Model - INFO - eval mIoU of Mug 0.947317
+2021-03-27 00:18:22,346 - Model - INFO - eval mIoU of Pistol 0.798453
+2021-03-27 00:18:22,346 - Model - INFO - eval mIoU of Rocket 0.508646
+2021-03-27 00:18:22,346 - Model - INFO - eval mIoU of Skateboard 0.753172
+2021-03-27 00:18:22,346 - Model - INFO - eval mIoU of Table 0.791889
+2021-03-27 00:18:22,346 - Model - INFO - Epoch 26 test Accuracy: 0.934710 Class avg mIOU: 0.794569 Inctance avg mIOU: 0.827032
+2021-03-27 00:18:22,346 - Model - INFO - Best accuracy is: 0.93920
+2021-03-27 00:18:22,346 - Model - INFO - Best class avg mIOU is: 0.80721
+2021-03-27 00:18:22,347 - Model - INFO - Best inctance avg mIOU is: 0.84159
+2021-03-27 00:18:22,347 - Model - INFO - Epoch 27 (27/251):
+2021-03-27 00:18:22,347 - Model - INFO - Learning rate:0.000500
+2021-03-27 00:25:08,293 - Model - INFO - Train accuracy is: 0.94486
+2021-03-27 00:26:16,124 - Model - INFO - eval mIoU of Airplane 0.812115
+2021-03-27 00:26:16,124 - Model - INFO - eval mIoU of Bag 0.792955
+2021-03-27 00:26:16,124 - Model - INFO - eval mIoU of Cap 0.792855
+2021-03-27 00:26:16,125 - Model - INFO - eval mIoU of Car 0.743594
+2021-03-27 00:26:16,125 - Model - INFO - eval mIoU of Chair 0.905744
+2021-03-27 00:26:16,125 - Model - INFO - eval mIoU of Earphone 0.721517
+2021-03-27 00:26:16,125 - Model - INFO - eval mIoU of Guitar 0.894727
+2021-03-27 00:26:16,125 - Model - INFO - eval mIoU of Knife 0.860278
+2021-03-27 00:26:16,125 - Model - INFO - eval mIoU of Lamp 0.831319
+2021-03-27 00:26:16,125 - Model - INFO - eval mIoU of Laptop 0.954302
+2021-03-27 00:26:16,125 - Model - INFO - eval mIoU of Motorbike 0.613555
+2021-03-27 00:26:16,125 - Model - INFO - eval mIoU of Mug 0.949981
+2021-03-27 00:26:16,125 - Model - INFO - eval mIoU of Pistol 0.816691
+2021-03-27 00:26:16,125 - Model - INFO - eval mIoU of Rocket 0.532703
+2021-03-27 00:26:16,125 - Model - INFO - eval mIoU of Skateboard 0.699067
+2021-03-27 00:26:16,125 - Model - INFO - eval mIoU of Table 0.817567
+2021-03-27 00:26:16,126 - Model - INFO - Epoch 27 test Accuracy: 0.938742 Class avg mIOU: 0.796186 Inctance avg mIOU: 0.840195
+2021-03-27 00:26:16,126 - Model - INFO - Best accuracy is: 0.93920
+2021-03-27 00:26:16,126 - Model - INFO - Best class avg mIOU is: 0.80721
+2021-03-27 00:26:16,126 - Model - INFO - Best inctance avg mIOU is: 0.84159
+2021-03-27 00:26:16,126 - Model - INFO - Epoch 28 (28/251):
+2021-03-27 00:26:16,126 - Model - INFO - Learning rate:0.000500
+2021-03-27 00:32:58,886 - Model - INFO - Train accuracy is: 0.94503
+2021-03-27 00:34:04,538 - Model - INFO - eval mIoU of Airplane 0.797483
+2021-03-27 00:34:04,538 - Model - INFO - eval mIoU of Bag 0.800243
+2021-03-27 00:34:04,538 - Model - INFO - eval mIoU of Cap 0.846298
+2021-03-27 00:34:04,538 - Model - INFO - eval mIoU of Car 0.761818
+2021-03-27 00:34:04,538 - Model - INFO - eval mIoU of Chair 0.881875
+2021-03-27 00:34:04,538 - Model - INFO - eval mIoU of Earphone 0.759474
+2021-03-27 00:34:04,538 - Model - INFO - eval mIoU of Guitar 0.903232
+2021-03-27 00:34:04,538 - Model - INFO - eval mIoU of Knife 0.855200
+2021-03-27 00:34:04,538 - Model - INFO - eval mIoU of Lamp 0.840353
+2021-03-27 00:34:04,538 - Model - INFO - eval mIoU of Laptop 0.950923
+2021-03-27 00:34:04,539 - Model - INFO - eval mIoU of Motorbike 0.665455
+2021-03-27 00:34:04,539 - Model - INFO - eval mIoU of Mug 0.936860
+2021-03-27 00:34:04,539 - Model - INFO - eval mIoU of Pistol 0.796460
+2021-03-27 00:34:04,539 - Model - INFO - eval mIoU of Rocket 0.468601
+2021-03-27 00:34:04,539 - Model - INFO - eval mIoU of Skateboard 0.756046
+2021-03-27 00:34:04,539 - Model - INFO - eval mIoU of Table 0.816911
+2021-03-27 00:34:04,539 - Model - INFO - Epoch 28 test Accuracy: 0.938027 Class avg mIOU: 0.802327 Inctance avg mIOU: 0.835760
+2021-03-27 00:34:04,539 - Model - INFO - Best accuracy is: 0.93920
+2021-03-27 00:34:04,539 - Model - INFO - Best class avg mIOU is: 0.80721
+2021-03-27 00:34:04,539 - Model - INFO - Best inctance avg mIOU is: 0.84159
+2021-03-27 00:34:04,539 - Model - INFO - Epoch 29 (29/251):
+2021-03-27 00:34:04,539 - Model - INFO - Learning rate:0.000500
+2021-03-27 00:40:45,049 - Model - INFO - Train accuracy is: 0.94510
+2021-03-27 00:41:53,353 - Model - INFO - eval mIoU of Airplane 0.807367
+2021-03-27 00:41:53,354 - Model - INFO - eval mIoU of Bag 0.799440
+2021-03-27 00:41:53,354 - Model - INFO - eval mIoU of Cap 0.820698
+2021-03-27 00:41:53,354 - Model - INFO - eval mIoU of Car 0.721446
+2021-03-27 00:41:53,354 - Model - INFO - eval mIoU of Chair 0.889899
+2021-03-27 00:41:53,354 - Model - INFO - eval mIoU of Earphone 0.713681
+2021-03-27 00:41:53,354 - Model - INFO - eval mIoU of Guitar 0.906747
+2021-03-27 00:41:53,354 - Model - INFO - eval mIoU of Knife 0.868561
+2021-03-27 00:41:53,354 - Model - INFO - eval mIoU of Lamp 0.838945
+2021-03-27 00:41:53,354 - Model - INFO - eval mIoU of Laptop 0.955484
+2021-03-27 00:41:53,354 - Model - INFO - eval mIoU of Motorbike 0.634822
+2021-03-27 00:41:53,354 - Model - INFO - eval mIoU of Mug 0.943815
+2021-03-27 00:41:53,354 - Model - INFO - eval mIoU of Pistol 0.818801
+2021-03-27 00:41:53,354 - Model - INFO - eval mIoU of Rocket 0.582284
+2021-03-27 00:41:53,354 - Model - INFO - eval mIoU of Skateboard 0.750443
+2021-03-27 00:41:53,354 - Model - INFO - eval mIoU of Table 0.819915
+2021-03-27 00:41:53,354 - Model - INFO - Epoch 29 test Accuracy: 0.937821 Class avg mIOU: 0.804522 Inctance avg mIOU: 0.838103
+2021-03-27 00:41:53,354 - Model - INFO - Best accuracy is: 0.93920
+2021-03-27 00:41:53,355 - Model - INFO - Best class avg mIOU is: 0.80721
+2021-03-27 00:41:53,355 - Model - INFO - Best inctance avg mIOU is: 0.84159
+2021-03-27 00:41:53,355 - Model - INFO - Epoch 30 (30/251):
+2021-03-27 00:41:53,355 - Model - INFO - Learning rate:0.000500
+2021-03-27 00:48:33,854 - Model - INFO - Train accuracy is: 0.94536
+2021-03-27 00:49:43,380 - Model - INFO - eval mIoU of Airplane 0.813513
+2021-03-27 00:49:43,380 - Model - INFO - eval mIoU of Bag 0.759207
+2021-03-27 00:49:43,381 - Model - INFO - eval mIoU of Cap 0.675399
+2021-03-27 00:49:43,381 - Model - INFO - eval mIoU of Car 0.763064
+2021-03-27 00:49:43,381 - Model - INFO - eval mIoU of Chair 0.890880
+2021-03-27 00:49:43,381 - Model - INFO - eval mIoU of Earphone 0.723590
+2021-03-27 00:49:43,381 - Model - INFO - eval mIoU of Guitar 0.870870
+2021-03-27 00:49:43,381 - Model - INFO - eval mIoU of Knife 0.845272
+2021-03-27 00:49:43,381 - Model - INFO - eval mIoU of Lamp 0.844336
+2021-03-27 00:49:43,381 - Model - INFO - eval mIoU of Laptop 0.953989
+2021-03-27 00:49:43,381 - Model - INFO - eval mIoU of Motorbike 0.575322
+2021-03-27 00:49:43,381 - Model - INFO - eval mIoU of Mug 0.945457
+2021-03-27 00:49:43,381 - Model - INFO - eval mIoU of Pistol 0.827442
+2021-03-27 00:49:43,381 - Model - INFO - eval mIoU of Rocket 0.635665
+2021-03-27 00:49:43,381 - Model - INFO - eval mIoU of Skateboard 0.749736
+2021-03-27 00:49:43,381 - Model - INFO - eval mIoU of Table 0.820511
+2021-03-27 00:49:43,382 - Model - INFO - Epoch 30 test Accuracy: 0.938310 Class avg mIOU: 0.793391 Inctance avg mIOU: 0.838007
+2021-03-27 00:49:43,382 - Model - INFO - Best accuracy is: 0.93920
+2021-03-27 00:49:43,382 - Model - INFO - Best class avg mIOU is: 0.80721
+2021-03-27 00:49:43,382 - Model - INFO - Best inctance avg mIOU is: 0.84159
+2021-03-27 00:49:43,382 - Model - INFO - Epoch 31 (31/251):
+2021-03-27 00:49:43,382 - Model - INFO - Learning rate:0.000500
+2021-03-27 00:56:19,506 - Model - INFO - Train accuracy is: 0.94538
+2021-03-27 00:57:28,064 - Model - INFO - eval mIoU of Airplane 0.812283
+2021-03-27 00:57:28,065 - Model - INFO - eval mIoU of Bag 0.774895
+2021-03-27 00:57:28,065 - Model - INFO - eval mIoU of Cap 0.822541
+2021-03-27 00:57:28,065 - Model - INFO - eval mIoU of Car 0.758625
+2021-03-27 00:57:28,065 - Model - INFO - eval mIoU of Chair 0.895457
+2021-03-27 00:57:28,065 - Model - INFO - eval mIoU of Earphone 0.723497
+2021-03-27 00:57:28,066 - Model - INFO - eval mIoU of Guitar 0.901239
+2021-03-27 00:57:28,066 - Model - INFO - eval mIoU of Knife 0.860929
+2021-03-27 00:57:28,066 - Model - INFO - eval mIoU of Lamp 0.836173
+2021-03-27 00:57:28,066 - Model - INFO - eval mIoU of Laptop 0.946892
+2021-03-27 00:57:28,066 - Model - INFO - eval mIoU of Motorbike 0.510167
+2021-03-27 00:57:28,066 - Model - INFO - eval mIoU of Mug 0.936977
+2021-03-27 00:57:28,066 - Model - INFO - eval mIoU of Pistol 0.749348
+2021-03-27 00:57:28,066 - Model - INFO - eval mIoU of Rocket 0.573592
+2021-03-27 00:57:28,066 - Model - INFO - eval mIoU of Skateboard 0.746952
+2021-03-27 00:57:28,066 - Model - INFO - eval mIoU of Table 0.816704
+2021-03-27 00:57:28,066 - Model - INFO - Epoch 31 test Accuracy: 0.933956 Class avg mIOU: 0.791642 Inctance avg mIOU: 0.836599
+2021-03-27 00:57:28,066 - Model - INFO - Best accuracy is: 0.93920
+2021-03-27 00:57:28,067 - Model - INFO - Best class avg mIOU is: 0.80721
+2021-03-27 00:57:28,067 - Model - INFO - Best inctance avg mIOU is: 0.84159
+2021-03-27 00:57:28,067 - Model - INFO - Epoch 32 (32/251):
+2021-03-27 00:57:28,067 - Model - INFO - Learning rate:0.000500
+2021-03-27 01:04:16,870 - Model - INFO - Train accuracy is: 0.94518
+2021-03-27 01:05:24,969 - Model - INFO - eval mIoU of Airplane 0.807495
+2021-03-27 01:05:24,969 - Model - INFO - eval mIoU of Bag 0.705619
+2021-03-27 01:05:24,969 - Model - INFO - eval mIoU of Cap 0.806371
+2021-03-27 01:05:24,969 - Model - INFO - eval mIoU of Car 0.740795
+2021-03-27 01:05:24,969 - Model - INFO - eval mIoU of Chair 0.893921
+2021-03-27 01:05:24,970 - Model - INFO - eval mIoU of Earphone 0.723740
+2021-03-27 01:05:24,970 - Model - INFO - eval mIoU of Guitar 0.905550
+2021-03-27 01:05:24,970 - Model - INFO - eval mIoU of Knife 0.831842
+2021-03-27 01:05:24,970 - Model - INFO - eval mIoU of Lamp 0.823613
+2021-03-27 01:05:24,970 - Model - INFO - eval mIoU of Laptop 0.950212
+2021-03-27 01:05:24,970 - Model - INFO - eval mIoU of Motorbike 0.561767
+2021-03-27 01:05:24,970 - Model - INFO - eval mIoU of Mug 0.929106
+2021-03-27 01:05:24,970 - Model - INFO - eval mIoU of Pistol 0.784318
+2021-03-27 01:05:24,970 - Model - INFO - eval mIoU of Rocket 0.572040
+2021-03-27 01:05:24,970 - Model - INFO - eval mIoU of Skateboard 0.757138
+2021-03-27 01:05:24,970 - Model - INFO - eval mIoU of Table 0.802427
+2021-03-27 01:05:24,970 - Model - INFO - Epoch 32 test Accuracy: 0.935617 Class avg mIOU: 0.787247 Inctance avg mIOU: 0.829789
+2021-03-27 01:05:24,970 - Model - INFO - Best accuracy is: 0.93920
+2021-03-27 01:05:24,970 - Model - INFO - Best class avg mIOU is: 0.80721
+2021-03-27 01:05:24,970 - Model - INFO - Best inctance avg mIOU is: 0.84159
+2021-03-27 01:05:24,971 - Model - INFO - Epoch 33 (33/251):
+2021-03-27 01:05:24,971 - Model - INFO - Learning rate:0.000500
+2021-03-27 01:12:06,331 - Model - INFO - Train accuracy is: 0.94543
+2021-03-27 01:13:12,697 - Model - INFO - eval mIoU of Airplane 0.796503
+2021-03-27 01:13:12,697 - Model - INFO - eval mIoU of Bag 0.799226
+2021-03-27 01:13:12,697 - Model - INFO - eval mIoU of Cap 0.846912
+2021-03-27 01:13:12,697 - Model - INFO - eval mIoU of Car 0.750073
+2021-03-27 01:13:12,697 - Model - INFO - eval mIoU of Chair 0.892912
+2021-03-27 01:13:12,697 - Model - INFO - eval mIoU of Earphone 0.777173
+2021-03-27 01:13:12,697 - Model - INFO - eval mIoU of Guitar 0.900542
+2021-03-27 01:13:12,697 - Model - INFO - eval mIoU of Knife 0.847073
+2021-03-27 01:13:12,698 - Model - INFO - eval mIoU of Lamp 0.833675
+2021-03-27 01:13:12,698 - Model - INFO - eval mIoU of Laptop 0.955616
+2021-03-27 01:13:12,698 - Model - INFO - eval mIoU of Motorbike 0.500947
+2021-03-27 01:13:12,698 - Model - INFO - eval mIoU of Mug 0.944444
+2021-03-27 01:13:12,698 - Model - INFO - eval mIoU of Pistol 0.740599
+2021-03-27 01:13:12,698 - Model - INFO - eval mIoU of Rocket 0.541039
+2021-03-27 01:13:12,698 - Model - INFO - eval mIoU of Skateboard 0.751504
+2021-03-27 01:13:12,698 - Model - INFO - eval mIoU of Table 0.808828
+2021-03-27 01:13:12,698 - Model - INFO - Epoch 33 test Accuracy: 0.934683 Class avg mIOU: 0.792942 Inctance avg mIOU: 0.831076
+2021-03-27 01:13:12,699 - Model - INFO - Best accuracy is: 0.93920
+2021-03-27 01:13:12,699 - Model - INFO - Best class avg mIOU is: 0.80721
+2021-03-27 01:13:12,699 - Model - INFO - Best inctance avg mIOU is: 0.84159
+2021-03-27 01:13:12,699 - Model - INFO - Epoch 34 (34/251):
+2021-03-27 01:13:12,699 - Model - INFO - Learning rate:0.000500
+2021-03-27 01:19:53,978 - Model - INFO - Train accuracy is: 0.94635
+2021-03-27 01:21:03,018 - Model - INFO - eval mIoU of Airplane 0.809990
+2021-03-27 01:21:03,018 - Model - INFO - eval mIoU of Bag 0.791515
+2021-03-27 01:21:03,018 - Model - INFO - eval mIoU of Cap 0.818957
+2021-03-27 01:21:03,018 - Model - INFO - eval mIoU of Car 0.714866
+2021-03-27 01:21:03,018 - Model - INFO - eval mIoU of Chair 0.895067
+2021-03-27 01:21:03,018 - Model - INFO - eval mIoU of Earphone 0.671363
+2021-03-27 01:21:03,018 - Model - INFO - eval mIoU of Guitar 0.900369
+2021-03-27 01:21:03,018 - Model - INFO - eval mIoU of Knife 0.847954
+2021-03-27 01:21:03,018 - Model - INFO - eval mIoU of Lamp 0.844403
+2021-03-27 01:21:03,018 - Model - INFO - eval mIoU of Laptop 0.952860
+2021-03-27 01:21:03,018 - Model - INFO - eval mIoU of Motorbike 0.611205
+2021-03-27 01:21:03,018 - Model - INFO - eval mIoU of Mug 0.947124
+2021-03-27 01:21:03,018 - Model - INFO - eval mIoU of Pistol 0.822623
+2021-03-27 01:21:03,019 - Model - INFO - eval mIoU of Rocket 0.605266
+2021-03-27 01:21:03,019 - Model - INFO - eval mIoU of Skateboard 0.755757
+2021-03-27 01:21:03,019 - Model - INFO - eval mIoU of Table 0.818699
+2021-03-27 01:21:03,019 - Model - INFO - Epoch 34 test Accuracy: 0.935844 Class avg mIOU: 0.800501 Inctance avg mIOU: 0.838086
+2021-03-27 01:21:03,019 - Model - INFO - Best accuracy is: 0.93920
+2021-03-27 01:21:03,019 - Model - INFO - Best class avg mIOU is: 0.80721
+2021-03-27 01:21:03,019 - Model - INFO - Best inctance avg mIOU is: 0.84159
+2021-03-27 01:21:03,019 - Model - INFO - Epoch 35 (35/251):
+2021-03-27 01:21:03,019 - Model - INFO - Learning rate:0.000500
+2021-03-27 01:27:54,227 - Model - INFO - Train accuracy is: 0.94562
+2021-03-27 01:29:02,597 - Model - INFO - eval mIoU of Airplane 0.781260
+2021-03-27 01:29:02,598 - Model - INFO - eval mIoU of Bag 0.661572
+2021-03-27 01:29:02,598 - Model - INFO - eval mIoU of Cap 0.798461
+2021-03-27 01:29:02,598 - Model - INFO - eval mIoU of Car 0.749360
+2021-03-27 01:29:02,598 - Model - INFO - eval mIoU of Chair 0.895714
+2021-03-27 01:29:02,598 - Model - INFO - eval mIoU of Earphone 0.709717
+2021-03-27 01:29:02,598 - Model - INFO - eval mIoU of Guitar 0.895970
+2021-03-27 01:29:02,598 - Model - INFO - eval mIoU of Knife 0.862867
+2021-03-27 01:29:02,598 - Model - INFO - eval mIoU of Lamp 0.827342
+2021-03-27 01:29:02,598 - Model - INFO - eval mIoU of Laptop 0.952386
+2021-03-27 01:29:02,598 - Model - INFO - eval mIoU of Motorbike 0.579470
+2021-03-27 01:29:02,598 - Model - INFO - eval mIoU of Mug 0.937700
+2021-03-27 01:29:02,598 - Model - INFO - eval mIoU of Pistol 0.805464
+2021-03-27 01:29:02,598 - Model - INFO - eval mIoU of Rocket 0.626817
+2021-03-27 01:29:02,598 - Model - INFO - eval mIoU of Skateboard 0.739239
+2021-03-27 01:29:02,598 - Model - INFO - eval mIoU of Table 0.806987
+2021-03-27 01:29:02,599 - Model - INFO - Epoch 35 test Accuracy: 0.935815 Class avg mIOU: 0.789395 Inctance avg mIOU: 0.830173
+2021-03-27 01:29:02,599 - Model - INFO - Best accuracy is: 0.93920
+2021-03-27 01:29:02,599 - Model - INFO - Best class avg mIOU is: 0.80721
+2021-03-27 01:29:02,599 - Model - INFO - Best inctance avg mIOU is: 0.84159
+2021-03-27 01:29:02,599 - Model - INFO - Epoch 36 (36/251):
+2021-03-27 01:29:02,599 - Model - INFO - Learning rate:0.000500
+2021-03-27 01:35:39,548 - Model - INFO - Train accuracy is: 0.94566
+2021-03-27 01:36:45,608 - Model - INFO - eval mIoU of Airplane 0.815976
+2021-03-27 01:36:45,608 - Model - INFO - eval mIoU of Bag 0.791000
+2021-03-27 01:36:45,608 - Model - INFO - eval mIoU of Cap 0.849048
+2021-03-27 01:36:45,608 - Model - INFO - eval mIoU of Car 0.743936
+2021-03-27 01:36:45,608 - Model - INFO - eval mIoU of Chair 0.897693
+2021-03-27 01:36:45,608 - Model - INFO - eval mIoU of Earphone 0.723625
+2021-03-27 01:36:45,608 - Model - INFO - eval mIoU of Guitar 0.903431
+2021-03-27 01:36:45,608 - Model - INFO - eval mIoU of Knife 0.862657
+2021-03-27 01:36:45,608 - Model - INFO - eval mIoU of Lamp 0.837746
+2021-03-27 01:36:45,608 - Model - INFO - eval mIoU of Laptop 0.948329
+2021-03-27 01:36:45,608 - Model - INFO - eval mIoU of Motorbike 0.679649
+2021-03-27 01:36:45,608 - Model - INFO - eval mIoU of Mug 0.935220
+2021-03-27 01:36:45,608 - Model - INFO - eval mIoU of Pistol 0.780797
+2021-03-27 01:36:45,609 - Model - INFO - eval mIoU of Rocket 0.500828
+2021-03-27 01:36:45,609 - Model - INFO - eval mIoU of Skateboard 0.725566
+2021-03-27 01:36:45,609 - Model - INFO - eval mIoU of Table 0.815927
+2021-03-27 01:36:45,609 - Model - INFO - Epoch 36 test Accuracy: 0.938850 Class avg mIOU: 0.800714 Inctance avg mIOU: 0.840028
+2021-03-27 01:36:45,609 - Model - INFO - Best accuracy is: 0.93920
+2021-03-27 01:36:45,609 - Model - INFO - Best class avg mIOU is: 0.80721
+2021-03-27 01:36:45,609 - Model - INFO - Best inctance avg mIOU is: 0.84159
+2021-03-27 01:36:45,609 - Model - INFO - Epoch 37 (37/251):
+2021-03-27 01:36:45,609 - Model - INFO - Learning rate:0.000500
+2021-03-27 01:43:33,517 - Model - INFO - Train accuracy is: 0.94601
+2021-03-27 01:44:40,608 - Model - INFO - eval mIoU of Airplane 0.778547
+2021-03-27 01:44:40,608 - Model - INFO - eval mIoU of Bag 0.778884
+2021-03-27 01:44:40,608 - Model - INFO - eval mIoU of Cap 0.834475
+2021-03-27 01:44:40,608 - Model - INFO - eval mIoU of Car 0.732402
+2021-03-27 01:44:40,608 - Model - INFO - eval mIoU of Chair 0.898035
+2021-03-27 01:44:40,608 - Model - INFO - eval mIoU of Earphone 0.672523
+2021-03-27 01:44:40,608 - Model - INFO - eval mIoU of Guitar 0.905898
+2021-03-27 01:44:40,608 - Model - INFO - eval mIoU of Knife 0.864011
+2021-03-27 01:44:40,608 - Model - INFO - eval mIoU of Lamp 0.843097
+2021-03-27 01:44:40,608 - Model - INFO - eval mIoU of Laptop 0.953997
+2021-03-27 01:44:40,609 - Model - INFO - eval mIoU of Motorbike 0.615982
+2021-03-27 01:44:40,609 - Model - INFO - eval mIoU of Mug 0.936831
+2021-03-27 01:44:40,609 - Model - INFO - eval mIoU of Pistol 0.813216
+2021-03-27 01:44:40,609 - Model - INFO - eval mIoU of Rocket 0.515677
+2021-03-27 01:44:40,609 - Model - INFO - eval mIoU of Skateboard 0.740718
+2021-03-27 01:44:40,609 - Model - INFO - eval mIoU of Table 0.822946
+2021-03-27 01:44:40,609 - Model - INFO - Epoch 37 test Accuracy: 0.937556 Class avg mIOU: 0.794202 Inctance avg mIOU: 0.837227
+2021-03-27 01:44:40,609 - Model - INFO - Best accuracy is: 0.93920
+2021-03-27 01:44:40,609 - Model - INFO - Best class avg mIOU is: 0.80721
+2021-03-27 01:44:40,609 - Model - INFO - Best inctance avg mIOU is: 0.84159
+2021-03-27 01:44:40,609 - Model - INFO - Epoch 38 (38/251):
+2021-03-27 01:44:40,609 - Model - INFO - Learning rate:0.000500
+2021-03-27 01:51:23,688 - Model - INFO - Train accuracy is: 0.94611
+2021-03-27 01:52:30,801 - Model - INFO - eval mIoU of Airplane 0.808472
+2021-03-27 01:52:30,801 - Model - INFO - eval mIoU of Bag 0.697186
+2021-03-27 01:52:30,801 - Model - INFO - eval mIoU of Cap 0.856952
+2021-03-27 01:52:30,801 - Model - INFO - eval mIoU of Car 0.764632
+2021-03-27 01:52:30,801 - Model - INFO - eval mIoU of Chair 0.895355
+2021-03-27 01:52:30,801 - Model - INFO - eval mIoU of Earphone 0.731850
+2021-03-27 01:52:30,802 - Model - INFO - eval mIoU of Guitar 0.904261
+2021-03-27 01:52:30,802 - Model - INFO - eval mIoU of Knife 0.858945
+2021-03-27 01:52:30,802 - Model - INFO - eval mIoU of Lamp 0.838038
+2021-03-27 01:52:30,802 - Model - INFO - eval mIoU of Laptop 0.954941
+2021-03-27 01:52:30,802 - Model - INFO - eval mIoU of Motorbike 0.609232
+2021-03-27 01:52:30,802 - Model - INFO - eval mIoU of Mug 0.942049
+2021-03-27 01:52:30,802 - Model - INFO - eval mIoU of Pistol 0.779493
+2021-03-27 01:52:30,802 - Model - INFO - eval mIoU of Rocket 0.571687
+2021-03-27 01:52:30,802 - Model - INFO - eval mIoU of Skateboard 0.739577
+2021-03-27 01:52:30,802 - Model - INFO - eval mIoU of Table 0.825189
+2021-03-27 01:52:30,802 - Model - INFO - Epoch 38 test Accuracy: 0.939396 Class avg mIOU: 0.798616 Inctance avg mIOU: 0.841479
+2021-03-27 01:52:30,802 - Model - INFO - Best accuracy is: 0.93940
+2021-03-27 01:52:30,802 - Model - INFO - Best class avg mIOU is: 0.80721
+2021-03-27 01:52:30,802 - Model - INFO - Best inctance avg mIOU is: 0.84159
+2021-03-27 01:52:30,803 - Model - INFO - Epoch 39 (39/251):
+2021-03-27 01:52:30,803 - Model - INFO - Learning rate:0.000500
+2021-03-27 01:59:13,426 - Model - INFO - Train accuracy is: 0.94614
+2021-03-27 02:00:21,667 - Model - INFO - eval mIoU of Airplane 0.790163
+2021-03-27 02:00:21,667 - Model - INFO - eval mIoU of Bag 0.788820
+2021-03-27 02:00:21,667 - Model - INFO - eval mIoU of Cap 0.749452
+2021-03-27 02:00:21,667 - Model - INFO - eval mIoU of Car 0.755786
+2021-03-27 02:00:21,667 - Model - INFO - eval mIoU of Chair 0.893232
+2021-03-27 02:00:21,667 - Model - INFO - eval mIoU of Earphone 0.741711
+2021-03-27 02:00:21,667 - Model - INFO - eval mIoU of Guitar 0.904971
+2021-03-27 02:00:21,667 - Model - INFO - eval mIoU of Knife 0.856376
+2021-03-27 02:00:21,667 - Model - INFO - eval mIoU of Lamp 0.843584
+2021-03-27 02:00:21,668 - Model - INFO - eval mIoU of Laptop 0.954127
+2021-03-27 02:00:21,668 - Model - INFO - eval mIoU of Motorbike 0.655952
+2021-03-27 02:00:21,668 - Model - INFO - eval mIoU of Mug 0.935363
+2021-03-27 02:00:21,668 - Model - INFO - eval mIoU of Pistol 0.793638
+2021-03-27 02:00:21,668 - Model - INFO - eval mIoU of Rocket 0.485712
+2021-03-27 02:00:21,668 - Model - INFO - eval mIoU of Skateboard 0.736825
+2021-03-27 02:00:21,668 - Model - INFO - eval mIoU of Table 0.820574
+2021-03-27 02:00:21,668 - Model - INFO - Epoch 39 test Accuracy: 0.939107 Class avg mIOU: 0.794143 Inctance avg mIOU: 0.838086
+2021-03-27 02:00:21,668 - Model - INFO - Best accuracy is: 0.93940
+2021-03-27 02:00:21,669 - Model - INFO - Best class avg mIOU is: 0.80721
+2021-03-27 02:00:21,669 - Model - INFO - Best inctance avg mIOU is: 0.84159
+2021-03-27 02:00:21,669 - Model - INFO - Epoch 40 (40/251):
+2021-03-27 02:00:21,669 - Model - INFO - Learning rate:0.000500
+2021-03-27 02:07:02,765 - Model - INFO - Train accuracy is: 0.94611
+2021-03-27 02:08:08,987 - Model - INFO - eval mIoU of Airplane 0.814541
+2021-03-27 02:08:08,988 - Model - INFO - eval mIoU of Bag 0.753269
+2021-03-27 02:08:08,988 - Model - INFO - eval mIoU of Cap 0.830367
+2021-03-27 02:08:08,988 - Model - INFO - eval mIoU of Car 0.748879
+2021-03-27 02:08:08,988 - Model - INFO - eval mIoU of Chair 0.901715
+2021-03-27 02:08:08,988 - Model - INFO - eval mIoU of Earphone 0.683699
+2021-03-27 02:08:08,988 - Model - INFO - eval mIoU of Guitar 0.895664
+2021-03-27 02:08:08,988 - Model - INFO - eval mIoU of Knife 0.860787
+2021-03-27 02:08:08,988 - Model - INFO - eval mIoU of Lamp 0.841572
+2021-03-27 02:08:08,988 - Model - INFO - eval mIoU of Laptop 0.955296
+2021-03-27 02:08:08,988 - Model - INFO - eval mIoU of Motorbike 0.624389
+2021-03-27 02:08:08,988 - Model - INFO - eval mIoU of Mug 0.917507
+2021-03-27 02:08:08,988 - Model - INFO - eval mIoU of Pistol 0.804190
+2021-03-27 02:08:08,988 - Model - INFO - eval mIoU of Rocket 0.573615
+2021-03-27 02:08:08,988 - Model - INFO - eval mIoU of Skateboard 0.748946
+2021-03-27 02:08:08,988 - Model - INFO - eval mIoU of Table 0.820308
+2021-03-27 02:08:08,989 - Model - INFO - Epoch 40 test Accuracy: 0.938602 Class avg mIOU: 0.798422 Inctance avg mIOU: 0.841757
+2021-03-27 02:08:08,989 - Model - INFO - Save model...
+2021-03-27 02:08:08,989 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-27 02:08:09,085 - Model - INFO - Saving model....
+2021-03-27 02:08:09,085 - Model - INFO - Best accuracy is: 0.93940
+2021-03-27 02:08:09,085 - Model - INFO - Best class avg mIOU is: 0.80721
+2021-03-27 02:08:09,085 - Model - INFO - Best inctance avg mIOU is: 0.84176
+2021-03-27 02:08:09,085 - Model - INFO - Epoch 41 (41/251):
+2021-03-27 02:08:09,085 - Model - INFO - Learning rate:0.000250
+2021-03-27 02:14:49,084 - Model - INFO - Train accuracy is: 0.94866
+2021-03-27 02:15:57,642 - Model - INFO - eval mIoU of Airplane 0.802929
+2021-03-27 02:15:57,642 - Model - INFO - eval mIoU of Bag 0.786436
+2021-03-27 02:15:57,642 - Model - INFO - eval mIoU of Cap 0.848500
+2021-03-27 02:15:57,642 - Model - INFO - eval mIoU of Car 0.759065
+2021-03-27 02:15:57,642 - Model - INFO - eval mIoU of Chair 0.900716
+2021-03-27 02:15:57,642 - Model - INFO - eval mIoU of Earphone 0.740553
+2021-03-27 02:15:57,642 - Model - INFO - eval mIoU of Guitar 0.903400
+2021-03-27 02:15:57,642 - Model - INFO - eval mIoU of Knife 0.853274
+2021-03-27 02:15:57,642 - Model - INFO - eval mIoU of Lamp 0.849822
+2021-03-27 02:15:57,642 - Model - INFO - eval mIoU of Laptop 0.953012
+2021-03-27 02:15:57,642 - Model - INFO - eval mIoU of Motorbike 0.668634
+2021-03-27 02:15:57,642 - Model - INFO - eval mIoU of Mug 0.933981
+2021-03-27 02:15:57,643 - Model - INFO - eval mIoU of Pistol 0.797169
+2021-03-27 02:15:57,643 - Model - INFO - eval mIoU of Rocket 0.567754
+2021-03-27 02:15:57,643 - Model - INFO - eval mIoU of Skateboard 0.747896
+2021-03-27 02:15:57,643 - Model - INFO - eval mIoU of Table 0.829509
+2021-03-27 02:15:57,643 - Model - INFO - Epoch 41 test Accuracy: 0.940652 Class avg mIOU: 0.808916 Inctance avg mIOU: 0.845750
+2021-03-27 02:15:57,643 - Model - INFO - Save model...
+2021-03-27 02:15:57,643 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-27 02:15:57,720 - Model - INFO - Saving model....
+2021-03-27 02:15:57,721 - Model - INFO - Best accuracy is: 0.94065
+2021-03-27 02:15:57,721 - Model - INFO - Best class avg mIOU is: 0.80892
+2021-03-27 02:15:57,721 - Model - INFO - Best inctance avg mIOU is: 0.84575
+2021-03-27 02:15:57,721 - Model - INFO - Epoch 42 (42/251):
+2021-03-27 02:15:57,721 - Model - INFO - Learning rate:0.000250
+2021-03-27 02:22:47,145 - Model - INFO - Train accuracy is: 0.94924
+2021-03-27 02:23:56,070 - Model - INFO - eval mIoU of Airplane 0.821018
+2021-03-27 02:23:56,070 - Model - INFO - eval mIoU of Bag 0.784118
+2021-03-27 02:23:56,070 - Model - INFO - eval mIoU of Cap 0.847627
+2021-03-27 02:23:56,070 - Model - INFO - eval mIoU of Car 0.766102
+2021-03-27 02:23:56,070 - Model - INFO - eval mIoU of Chair 0.890644
+2021-03-27 02:23:56,070 - Model - INFO - eval mIoU of Earphone 0.742761
+2021-03-27 02:23:56,070 - Model - INFO - eval mIoU of Guitar 0.874400
+2021-03-27 02:23:56,070 - Model - INFO - eval mIoU of Knife 0.852404
+2021-03-27 02:23:56,070 - Model - INFO - eval mIoU of Lamp 0.841019
+2021-03-27 02:23:56,070 - Model - INFO - eval mIoU of Laptop 0.954781
+2021-03-27 02:23:56,070 - Model - INFO - eval mIoU of Motorbike 0.709546
+2021-03-27 02:23:56,070 - Model - INFO - eval mIoU of Mug 0.944007
+2021-03-27 02:23:56,070 - Model - INFO - eval mIoU of Pistol 0.808958
+2021-03-27 02:23:56,070 - Model - INFO - eval mIoU of Rocket 0.599981
+2021-03-27 02:23:56,070 - Model - INFO - eval mIoU of Skateboard 0.741777
+2021-03-27 02:23:56,070 - Model - INFO - eval mIoU of Table 0.828259
+2021-03-27 02:23:56,071 - Model - INFO - Epoch 42 test Accuracy: 0.939378 Class avg mIOU: 0.812963 Inctance avg mIOU: 0.844098
+2021-03-27 02:23:56,071 - Model - INFO - Best accuracy is: 0.94065
+2021-03-27 02:23:56,071 - Model - INFO - Best class avg mIOU is: 0.81296
+2021-03-27 02:23:56,071 - Model - INFO - Best inctance avg mIOU is: 0.84575
+2021-03-27 02:23:56,071 - Model - INFO - Epoch 43 (43/251):
+2021-03-27 02:23:56,071 - Model - INFO - Learning rate:0.000250
+2021-03-27 02:30:39,615 - Model - INFO - Train accuracy is: 0.94940
+2021-03-27 02:31:46,300 - Model - INFO - eval mIoU of Airplane 0.823393
+2021-03-27 02:31:46,300 - Model - INFO - eval mIoU of Bag 0.809735
+2021-03-27 02:31:46,300 - Model - INFO - eval mIoU of Cap 0.826647
+2021-03-27 02:31:46,300 - Model - INFO - eval mIoU of Car 0.762923
+2021-03-27 02:31:46,300 - Model - INFO - eval mIoU of Chair 0.899862
+2021-03-27 02:31:46,300 - Model - INFO - eval mIoU of Earphone 0.759822
+2021-03-27 02:31:46,300 - Model - INFO - eval mIoU of Guitar 0.904126
+2021-03-27 02:31:46,300 - Model - INFO - eval mIoU of Knife 0.864549
+2021-03-27 02:31:46,300 - Model - INFO - eval mIoU of Lamp 0.846147
+2021-03-27 02:31:46,300 - Model - INFO - eval mIoU of Laptop 0.953527
+2021-03-27 02:31:46,301 - Model - INFO - eval mIoU of Motorbike 0.644269
+2021-03-27 02:31:46,301 - Model - INFO - eval mIoU of Mug 0.930900
+2021-03-27 02:31:46,301 - Model - INFO - eval mIoU of Pistol 0.776027
+2021-03-27 02:31:46,301 - Model - INFO - eval mIoU of Rocket 0.603574
+2021-03-27 02:31:46,301 - Model - INFO - eval mIoU of Skateboard 0.749992
+2021-03-27 02:31:46,301 - Model - INFO - eval mIoU of Table 0.825067
+2021-03-27 02:31:46,301 - Model - INFO - Epoch 43 test Accuracy: 0.941249 Class avg mIOU: 0.811285 Inctance avg mIOU: 0.846373
+2021-03-27 02:31:46,301 - Model - INFO - Save model...
+2021-03-27 02:31:46,302 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-27 02:31:46,390 - Model - INFO - Saving model....
+2021-03-27 02:31:46,390 - Model - INFO - Best accuracy is: 0.94125
+2021-03-27 02:31:46,390 - Model - INFO - Best class avg mIOU is: 0.81296
+2021-03-27 02:31:46,390 - Model - INFO - Best inctance avg mIOU is: 0.84637
+2021-03-27 02:31:46,390 - Model - INFO - Epoch 44 (44/251):
+2021-03-27 02:31:46,390 - Model - INFO - Learning rate:0.000250
+2021-03-27 02:38:30,486 - Model - INFO - Train accuracy is: 0.94938
+2021-03-27 02:39:37,981 - Model - INFO - eval mIoU of Airplane 0.826803
+2021-03-27 02:39:37,981 - Model - INFO - eval mIoU of Bag 0.808968
+2021-03-27 02:39:37,981 - Model - INFO - eval mIoU of Cap 0.824969
+2021-03-27 02:39:37,981 - Model - INFO - eval mIoU of Car 0.765099
+2021-03-27 02:39:37,981 - Model - INFO - eval mIoU of Chair 0.893090
+2021-03-27 02:39:37,981 - Model - INFO - eval mIoU of Earphone 0.729449
+2021-03-27 02:39:37,981 - Model - INFO - eval mIoU of Guitar 0.906902
+2021-03-27 02:39:37,981 - Model - INFO - eval mIoU of Knife 0.820610
+2021-03-27 02:39:37,982 - Model - INFO - eval mIoU of Lamp 0.843952
+2021-03-27 02:39:37,982 - Model - INFO - eval mIoU of Laptop 0.954720
+2021-03-27 02:39:37,982 - Model - INFO - eval mIoU of Motorbike 0.642746
+2021-03-27 02:39:37,982 - Model - INFO - eval mIoU of Mug 0.947102
+2021-03-27 02:39:37,982 - Model - INFO - eval mIoU of Pistol 0.822499
+2021-03-27 02:39:37,982 - Model - INFO - eval mIoU of Rocket 0.574365
+2021-03-27 02:39:37,982 - Model - INFO - eval mIoU of Skateboard 0.763412
+2021-03-27 02:39:37,982 - Model - INFO - eval mIoU of Table 0.824833
+2021-03-27 02:39:37,982 - Model - INFO - Epoch 44 test Accuracy: 0.938936 Class avg mIOU: 0.809345 Inctance avg mIOU: 0.844679
+2021-03-27 02:39:37,982 - Model - INFO - Best accuracy is: 0.94125
+2021-03-27 02:39:37,982 - Model - INFO - Best class avg mIOU is: 0.81296
+2021-03-27 02:39:37,982 - Model - INFO - Best inctance avg mIOU is: 0.84637
+2021-03-27 02:39:37,983 - Model - INFO - Epoch 45 (45/251):
+2021-03-27 02:39:37,983 - Model - INFO - Learning rate:0.000250
+2021-03-27 02:46:21,326 - Model - INFO - Train accuracy is: 0.94955
+2021-03-27 02:47:28,710 - Model - INFO - eval mIoU of Airplane 0.796899
+2021-03-27 02:47:28,710 - Model - INFO - eval mIoU of Bag 0.794132
+2021-03-27 02:47:28,710 - Model - INFO - eval mIoU of Cap 0.858168
+2021-03-27 02:47:28,710 - Model - INFO - eval mIoU of Car 0.766346
+2021-03-27 02:47:28,710 - Model - INFO - eval mIoU of Chair 0.896523
+2021-03-27 02:47:28,710 - Model - INFO - eval mIoU of Earphone 0.738254
+2021-03-27 02:47:28,711 - Model - INFO - eval mIoU of Guitar 0.906420
+2021-03-27 02:47:28,711 - Model - INFO - eval mIoU of Knife 0.875002
+2021-03-27 02:47:28,711 - Model - INFO - eval mIoU of Lamp 0.840106
+2021-03-27 02:47:28,711 - Model - INFO - eval mIoU of Laptop 0.954769
+2021-03-27 02:47:28,711 - Model - INFO - eval mIoU of Motorbike 0.682138
+2021-03-27 02:47:28,711 - Model - INFO - eval mIoU of Mug 0.951229
+2021-03-27 02:47:28,711 - Model - INFO - eval mIoU of Pistol 0.820463
+2021-03-27 02:47:28,711 - Model - INFO - eval mIoU of Rocket 0.620361
+2021-03-27 02:47:28,712 - Model - INFO - eval mIoU of Skateboard 0.761213
+2021-03-27 02:47:28,712 - Model - INFO - eval mIoU of Table 0.821144
+2021-03-27 02:47:28,712 - Model - INFO - Epoch 45 test Accuracy: 0.940348 Class avg mIOU: 0.817698 Inctance avg mIOU: 0.843046
+2021-03-27 02:47:28,713 - Model - INFO - Best accuracy is: 0.94125
+2021-03-27 02:47:28,713 - Model - INFO - Best class avg mIOU is: 0.81770
+2021-03-27 02:47:28,713 - Model - INFO - Best inctance avg mIOU is: 0.84637
+2021-03-27 02:47:28,713 - Model - INFO - Epoch 46 (46/251):
+2021-03-27 02:47:28,713 - Model - INFO - Learning rate:0.000250
+2021-03-27 02:54:08,381 - Model - INFO - Train accuracy is: 0.94973
+2021-03-27 02:55:17,825 - Model - INFO - eval mIoU of Airplane 0.811564
+2021-03-27 02:55:17,825 - Model - INFO - eval mIoU of Bag 0.771876
+2021-03-27 02:55:17,825 - Model - INFO - eval mIoU of Cap 0.840625
+2021-03-27 02:55:17,825 - Model - INFO - eval mIoU of Car 0.739172
+2021-03-27 02:55:17,825 - Model - INFO - eval mIoU of Chair 0.892609
+2021-03-27 02:55:17,825 - Model - INFO - eval mIoU of Earphone 0.641255
+2021-03-27 02:55:17,825 - Model - INFO - eval mIoU of Guitar 0.895396
+2021-03-27 02:55:17,825 - Model - INFO - eval mIoU of Knife 0.876615
+2021-03-27 02:55:17,825 - Model - INFO - eval mIoU of Lamp 0.827047
+2021-03-27 02:55:17,826 - Model - INFO - eval mIoU of Laptop 0.953290
+2021-03-27 02:55:17,826 - Model - INFO - eval mIoU of Motorbike 0.674499
+2021-03-27 02:55:17,826 - Model - INFO - eval mIoU of Mug 0.950854
+2021-03-27 02:55:17,826 - Model - INFO - eval mIoU of Pistol 0.800942
+2021-03-27 02:55:17,826 - Model - INFO - eval mIoU of Rocket 0.588083
+2021-03-27 02:55:17,826 - Model - INFO - eval mIoU of Skateboard 0.742322
+2021-03-27 02:55:17,826 - Model - INFO - eval mIoU of Table 0.795416
+2021-03-27 02:55:17,826 - Model - INFO - Epoch 46 test Accuracy: 0.935722 Class avg mIOU: 0.800098 Inctance avg mIOU: 0.831409
+2021-03-27 02:55:17,826 - Model - INFO - Best accuracy is: 0.94125
+2021-03-27 02:55:17,826 - Model - INFO - Best class avg mIOU is: 0.81770
+2021-03-27 02:55:17,826 - Model - INFO - Best inctance avg mIOU is: 0.84637
+2021-03-27 02:55:17,827 - Model - INFO - Epoch 47 (47/251):
+2021-03-27 02:55:17,827 - Model - INFO - Learning rate:0.000250
+2021-03-27 03:02:06,647 - Model - INFO - Train accuracy is: 0.94968
+2021-03-27 03:03:13,184 - Model - INFO - eval mIoU of Airplane 0.817405
+2021-03-27 03:03:13,185 - Model - INFO - eval mIoU of Bag 0.798896
+2021-03-27 03:03:13,185 - Model - INFO - eval mIoU of Cap 0.880533
+2021-03-27 03:03:13,185 - Model - INFO - eval mIoU of Car 0.748941
+2021-03-27 03:03:13,185 - Model - INFO - eval mIoU of Chair 0.902893
+2021-03-27 03:03:13,185 - Model - INFO - eval mIoU of Earphone 0.773025
+2021-03-27 03:03:13,185 - Model - INFO - eval mIoU of Guitar 0.906350
+2021-03-27 03:03:13,185 - Model - INFO - eval mIoU of Knife 0.873170
+2021-03-27 03:03:13,185 - Model - INFO - eval mIoU of Lamp 0.846543
+2021-03-27 03:03:13,185 - Model - INFO - eval mIoU of Laptop 0.948979
+2021-03-27 03:03:13,185 - Model - INFO - eval mIoU of Motorbike 0.712881
+2021-03-27 03:03:13,186 - Model - INFO - eval mIoU of Mug 0.934229
+2021-03-27 03:03:13,186 - Model - INFO - eval mIoU of Pistol 0.807493
+2021-03-27 03:03:13,186 - Model - INFO - eval mIoU of Rocket 0.582471
+2021-03-27 03:03:13,186 - Model - INFO - eval mIoU of Skateboard 0.739891
+2021-03-27 03:03:13,186 - Model - INFO - eval mIoU of Table 0.822168
+2021-03-27 03:03:13,186 - Model - INFO - Epoch 47 test Accuracy: 0.941552 Class avg mIOU: 0.818492 Inctance avg mIOU: 0.846816
+2021-03-27 03:03:13,187 - Model - INFO - Save model...
+2021-03-27 03:03:13,187 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-27 03:03:13,269 - Model - INFO - Saving model....
+2021-03-27 03:03:13,269 - Model - INFO - Best accuracy is: 0.94155
+2021-03-27 03:03:13,269 - Model - INFO - Best class avg mIOU is: 0.81849
+2021-03-27 03:03:13,269 - Model - INFO - Best inctance avg mIOU is: 0.84682
+2021-03-27 03:03:13,269 - Model - INFO - Epoch 48 (48/251):
+2021-03-27 03:03:13,269 - Model - INFO - Learning rate:0.000250
+2021-03-27 03:09:49,796 - Model - INFO - Train accuracy is: 0.94995
+2021-03-27 03:10:56,425 - Model - INFO - eval mIoU of Airplane 0.810758
+2021-03-27 03:10:56,426 - Model - INFO - eval mIoU of Bag 0.771840
+2021-03-27 03:10:56,426 - Model - INFO - eval mIoU of Cap 0.787109
+2021-03-27 03:10:56,426 - Model - INFO - eval mIoU of Car 0.751898
+2021-03-27 03:10:56,426 - Model - INFO - eval mIoU of Chair 0.897308
+2021-03-27 03:10:56,426 - Model - INFO - eval mIoU of Earphone 0.711309
+2021-03-27 03:10:56,426 - Model - INFO - eval mIoU of Guitar 0.902136
+2021-03-27 03:10:56,426 - Model - INFO - eval mIoU of Knife 0.870026
+2021-03-27 03:10:56,426 - Model - INFO - eval mIoU of Lamp 0.831607
+2021-03-27 03:10:56,426 - Model - INFO - eval mIoU of Laptop 0.955344
+2021-03-27 03:10:56,426 - Model - INFO - eval mIoU of Motorbike 0.636601
+2021-03-27 03:10:56,426 - Model - INFO - eval mIoU of Mug 0.937411
+2021-03-27 03:10:56,426 - Model - INFO - eval mIoU of Pistol 0.778965
+2021-03-27 03:10:56,426 - Model - INFO - eval mIoU of Rocket 0.543611
+2021-03-27 03:10:56,426 - Model - INFO - eval mIoU of Skateboard 0.758716
+2021-03-27 03:10:56,426 - Model - INFO - eval mIoU of Table 0.815761
+2021-03-27 03:10:56,427 - Model - INFO - Epoch 48 test Accuracy: 0.938098 Class avg mIOU: 0.797525 Inctance avg mIOU: 0.838811
+2021-03-27 03:10:56,427 - Model - INFO - Best accuracy is: 0.94155
+2021-03-27 03:10:56,427 - Model - INFO - Best class avg mIOU is: 0.81849
+2021-03-27 03:10:56,427 - Model - INFO - Best inctance avg mIOU is: 0.84682
+2021-03-27 03:10:56,427 - Model - INFO - Epoch 49 (49/251):
+2021-03-27 03:10:56,427 - Model - INFO - Learning rate:0.000250
+2021-03-27 03:17:41,627 - Model - INFO - Train accuracy is: 0.94975
+2021-03-27 03:18:50,470 - Model - INFO - eval mIoU of Airplane 0.806491
+2021-03-27 03:18:50,470 - Model - INFO - eval mIoU of Bag 0.814543
+2021-03-27 03:18:50,470 - Model - INFO - eval mIoU of Cap 0.864840
+2021-03-27 03:18:50,471 - Model - INFO - eval mIoU of Car 0.768206
+2021-03-27 03:18:50,471 - Model - INFO - eval mIoU of Chair 0.897023
+2021-03-27 03:18:50,471 - Model - INFO - eval mIoU of Earphone 0.678477
+2021-03-27 03:18:50,471 - Model - INFO - eval mIoU of Guitar 0.904302
+2021-03-27 03:18:50,471 - Model - INFO - eval mIoU of Knife 0.860690
+2021-03-27 03:18:50,471 - Model - INFO - eval mIoU of Lamp 0.843177
+2021-03-27 03:18:50,471 - Model - INFO - eval mIoU of Laptop 0.955257
+2021-03-27 03:18:50,471 - Model - INFO - eval mIoU of Motorbike 0.692092
+2021-03-27 03:18:50,471 - Model - INFO - eval mIoU of Mug 0.948214
+2021-03-27 03:18:50,471 - Model - INFO - eval mIoU of Pistol 0.822234
+2021-03-27 03:18:50,471 - Model - INFO - eval mIoU of Rocket 0.623314
+2021-03-27 03:18:50,471 - Model - INFO - eval mIoU of Skateboard 0.755820
+2021-03-27 03:18:50,471 - Model - INFO - eval mIoU of Table 0.826220
+2021-03-27 03:18:50,472 - Model - INFO - Epoch 49 test Accuracy: 0.940478 Class avg mIOU: 0.816306 Inctance avg mIOU: 0.845663
+2021-03-27 03:18:50,472 - Model - INFO - Best accuracy is: 0.94155
+2021-03-27 03:18:50,472 - Model - INFO - Best class avg mIOU is: 0.81849
+2021-03-27 03:18:50,472 - Model - INFO - Best inctance avg mIOU is: 0.84682
+2021-03-27 03:18:50,472 - Model - INFO - Epoch 50 (50/251):
+2021-03-27 03:18:50,473 - Model - INFO - Learning rate:0.000250
+2021-03-27 03:25:34,672 - Model - INFO - Train accuracy is: 0.95015
+2021-03-27 03:26:42,443 - Model - INFO - eval mIoU of Airplane 0.821126
+2021-03-27 03:26:42,443 - Model - INFO - eval mIoU of Bag 0.794190
+2021-03-27 03:26:42,443 - Model - INFO - eval mIoU of Cap 0.860044
+2021-03-27 03:26:42,443 - Model - INFO - eval mIoU of Car 0.761331
+2021-03-27 03:26:42,443 - Model - INFO - eval mIoU of Chair 0.898068
+2021-03-27 03:26:42,443 - Model - INFO - eval mIoU of Earphone 0.748460
+2021-03-27 03:26:42,443 - Model - INFO - eval mIoU of Guitar 0.908976
+2021-03-27 03:26:42,443 - Model - INFO - eval mIoU of Knife 0.842486
+2021-03-27 03:26:42,443 - Model - INFO - eval mIoU of Lamp 0.841790
+2021-03-27 03:26:42,443 - Model - INFO - eval mIoU of Laptop 0.952750
+2021-03-27 03:26:42,443 - Model - INFO - eval mIoU of Motorbike 0.700483
+2021-03-27 03:26:42,443 - Model - INFO - eval mIoU of Mug 0.944835
+2021-03-27 03:26:42,443 - Model - INFO - eval mIoU of Pistol 0.797075
+2021-03-27 03:26:42,444 - Model - INFO - eval mIoU of Rocket 0.585718
+2021-03-27 03:26:42,444 - Model - INFO - eval mIoU of Skateboard 0.751380
+2021-03-27 03:26:42,444 - Model - INFO - eval mIoU of Table 0.825632
+2021-03-27 03:26:42,444 - Model - INFO - Epoch 50 test Accuracy: 0.940073 Class avg mIOU: 0.814646 Inctance avg mIOU: 0.846383
+2021-03-27 03:26:42,444 - Model - INFO - Best accuracy is: 0.94155
+2021-03-27 03:26:42,444 - Model - INFO - Best class avg mIOU is: 0.81849
+2021-03-27 03:26:42,444 - Model - INFO - Best inctance avg mIOU is: 0.84682
+2021-03-27 03:26:42,445 - Model - INFO - Epoch 51 (51/251):
+2021-03-27 03:26:42,445 - Model - INFO - Learning rate:0.000250
+2021-03-27 03:33:24,563 - Model - INFO - Train accuracy is: 0.95061
+2021-03-27 03:34:29,658 - Model - INFO - eval mIoU of Airplane 0.815073
+2021-03-27 03:34:29,658 - Model - INFO - eval mIoU of Bag 0.794729
+2021-03-27 03:34:29,658 - Model - INFO - eval mIoU of Cap 0.816918
+2021-03-27 03:34:29,658 - Model - INFO - eval mIoU of Car 0.755960
+2021-03-27 03:34:29,658 - Model - INFO - eval mIoU of Chair 0.901807
+2021-03-27 03:34:29,658 - Model - INFO - eval mIoU of Earphone 0.712941
+2021-03-27 03:34:29,658 - Model - INFO - eval mIoU of Guitar 0.903970
+2021-03-27 03:34:29,658 - Model - INFO - eval mIoU of Knife 0.870276
+2021-03-27 03:34:29,658 - Model - INFO - eval mIoU of Lamp 0.843433
+2021-03-27 03:34:29,658 - Model - INFO - eval mIoU of Laptop 0.951759
+2021-03-27 03:34:29,658 - Model - INFO - eval mIoU of Motorbike 0.713187
+2021-03-27 03:34:29,658 - Model - INFO - eval mIoU of Mug 0.940669
+2021-03-27 03:34:29,658 - Model - INFO - eval mIoU of Pistol 0.798316
+2021-03-27 03:34:29,658 - Model - INFO - eval mIoU of Rocket 0.593133
+2021-03-27 03:34:29,658 - Model - INFO - eval mIoU of Skateboard 0.738038
+2021-03-27 03:34:29,658 - Model - INFO - eval mIoU of Table 0.825158
+2021-03-27 03:34:29,659 - Model - INFO - Epoch 51 test Accuracy: 0.940792 Class avg mIOU: 0.810960 Inctance avg mIOU: 0.846518
+2021-03-27 03:34:29,659 - Model - INFO - Best accuracy is: 0.94155
+2021-03-27 03:34:29,659 - Model - INFO - Best class avg mIOU is: 0.81849
+2021-03-27 03:34:29,659 - Model - INFO - Best inctance avg mIOU is: 0.84682
+2021-03-27 03:34:29,659 - Model - INFO - Epoch 52 (52/251):
+2021-03-27 03:34:29,659 - Model - INFO - Learning rate:0.000250
+2021-03-27 03:41:17,887 - Model - INFO - Train accuracy is: 0.95034
+2021-03-27 03:42:24,367 - Model - INFO - eval mIoU of Airplane 0.823028
+2021-03-27 03:42:24,368 - Model - INFO - eval mIoU of Bag 0.780912
+2021-03-27 03:42:24,368 - Model - INFO - eval mIoU of Cap 0.859638
+2021-03-27 03:42:24,368 - Model - INFO - eval mIoU of Car 0.748567
+2021-03-27 03:42:24,368 - Model - INFO - eval mIoU of Chair 0.891041
+2021-03-27 03:42:24,368 - Model - INFO - eval mIoU of Earphone 0.798434
+2021-03-27 03:42:24,368 - Model - INFO - eval mIoU of Guitar 0.905818
+2021-03-27 03:42:24,368 - Model - INFO - eval mIoU of Knife 0.872767
+2021-03-27 03:42:24,368 - Model - INFO - eval mIoU of Lamp 0.828440
+2021-03-27 03:42:24,368 - Model - INFO - eval mIoU of Laptop 0.956793
+2021-03-27 03:42:24,368 - Model - INFO - eval mIoU of Motorbike 0.678795
+2021-03-27 03:42:24,368 - Model - INFO - eval mIoU of Mug 0.940224
+2021-03-27 03:42:24,369 - Model - INFO - eval mIoU of Pistol 0.778290
+2021-03-27 03:42:24,369 - Model - INFO - eval mIoU of Rocket 0.541083
+2021-03-27 03:42:24,369 - Model - INFO - eval mIoU of Skateboard 0.754664
+2021-03-27 03:42:24,369 - Model - INFO - eval mIoU of Table 0.825688
+2021-03-27 03:42:24,369 - Model - INFO - Epoch 52 test Accuracy: 0.940577 Class avg mIOU: 0.811511 Inctance avg mIOU: 0.842952
+2021-03-27 03:42:24,369 - Model - INFO - Best accuracy is: 0.94155
+2021-03-27 03:42:24,369 - Model - INFO - Best class avg mIOU is: 0.81849
+2021-03-27 03:42:24,370 - Model - INFO - Best inctance avg mIOU is: 0.84682
+2021-03-27 03:42:24,370 - Model - INFO - Epoch 53 (53/251):
+2021-03-27 03:42:24,370 - Model - INFO - Learning rate:0.000250
+2021-03-27 03:49:02,215 - Model - INFO - Train accuracy is: 0.94999
+2021-03-27 03:50:08,185 - Model - INFO - eval mIoU of Airplane 0.796573
+2021-03-27 03:50:08,186 - Model - INFO - eval mIoU of Bag 0.797803
+2021-03-27 03:50:08,186 - Model - INFO - eval mIoU of Cap 0.858509
+2021-03-27 03:50:08,186 - Model - INFO - eval mIoU of Car 0.761017
+2021-03-27 03:50:08,186 - Model - INFO - eval mIoU of Chair 0.900493
+2021-03-27 03:50:08,186 - Model - INFO - eval mIoU of Earphone 0.662371
+2021-03-27 03:50:08,186 - Model - INFO - eval mIoU of Guitar 0.905790
+2021-03-27 03:50:08,186 - Model - INFO - eval mIoU of Knife 0.870618
+2021-03-27 03:50:08,186 - Model - INFO - eval mIoU of Lamp 0.844733
+2021-03-27 03:50:08,186 - Model - INFO - eval mIoU of Laptop 0.954638
+2021-03-27 03:50:08,186 - Model - INFO - eval mIoU of Motorbike 0.717806
+2021-03-27 03:50:08,186 - Model - INFO - eval mIoU of Mug 0.947751
+2021-03-27 03:50:08,186 - Model - INFO - eval mIoU of Pistol 0.761567
+2021-03-27 03:50:08,186 - Model - INFO - eval mIoU of Rocket 0.540052
+2021-03-27 03:50:08,187 - Model - INFO - eval mIoU of Skateboard 0.754677
+2021-03-27 03:50:08,187 - Model - INFO - eval mIoU of Table 0.813901
+2021-03-27 03:50:08,187 - Model - INFO - Epoch 53 test Accuracy: 0.939494 Class avg mIOU: 0.805519 Inctance avg mIOU: 0.840779
+2021-03-27 03:50:08,187 - Model - INFO - Best accuracy is: 0.94155
+2021-03-27 03:50:08,187 - Model - INFO - Best class avg mIOU is: 0.81849
+2021-03-27 03:50:08,187 - Model - INFO - Best inctance avg mIOU is: 0.84682
+2021-03-27 03:50:08,187 - Model - INFO - Epoch 54 (54/251):
+2021-03-27 03:50:08,187 - Model - INFO - Learning rate:0.000250
+2021-03-27 03:56:50,340 - Model - INFO - Train accuracy is: 0.95045
+2021-03-27 03:57:56,745 - Model - INFO - eval mIoU of Airplane 0.813714
+2021-03-27 03:57:56,746 - Model - INFO - eval mIoU of Bag 0.768889
+2021-03-27 03:57:56,746 - Model - INFO - eval mIoU of Cap 0.836372
+2021-03-27 03:57:56,746 - Model - INFO - eval mIoU of Car 0.763709
+2021-03-27 03:57:56,746 - Model - INFO - eval mIoU of Chair 0.904763
+2021-03-27 03:57:56,746 - Model - INFO - eval mIoU of Earphone 0.723107
+2021-03-27 03:57:56,746 - Model - INFO - eval mIoU of Guitar 0.901302
+2021-03-27 03:57:56,746 - Model - INFO - eval mIoU of Knife 0.868382
+2021-03-27 03:57:56,746 - Model - INFO - eval mIoU of Lamp 0.838695
+2021-03-27 03:57:56,746 - Model - INFO - eval mIoU of Laptop 0.954893
+2021-03-27 03:57:56,746 - Model - INFO - eval mIoU of Motorbike 0.683377
+2021-03-27 03:57:56,746 - Model - INFO - eval mIoU of Mug 0.950545
+2021-03-27 03:57:56,746 - Model - INFO - eval mIoU of Pistol 0.816009
+2021-03-27 03:57:56,747 - Model - INFO - eval mIoU of Rocket 0.602194
+2021-03-27 03:57:56,747 - Model - INFO - eval mIoU of Skateboard 0.756737
+2021-03-27 03:57:56,747 - Model - INFO - eval mIoU of Table 0.819620
+2021-03-27 03:57:56,747 - Model - INFO - Epoch 54 test Accuracy: 0.940999 Class avg mIOU: 0.812644 Inctance avg mIOU: 0.845402
+2021-03-27 03:57:56,747 - Model - INFO - Best accuracy is: 0.94155
+2021-03-27 03:57:56,747 - Model - INFO - Best class avg mIOU is: 0.81849
+2021-03-27 03:57:56,747 - Model - INFO - Best inctance avg mIOU is: 0.84682
+2021-03-27 03:57:56,747 - Model - INFO - Epoch 55 (55/251):
+2021-03-27 03:57:56,748 - Model - INFO - Learning rate:0.000250
+2021-03-27 04:04:34,535 - Model - INFO - Train accuracy is: 0.95044
+2021-03-27 04:05:42,005 - Model - INFO - eval mIoU of Airplane 0.811475
+2021-03-27 04:05:42,006 - Model - INFO - eval mIoU of Bag 0.793292
+2021-03-27 04:05:42,006 - Model - INFO - eval mIoU of Cap 0.869387
+2021-03-27 04:05:42,006 - Model - INFO - eval mIoU of Car 0.773545
+2021-03-27 04:05:42,006 - Model - INFO - eval mIoU of Chair 0.901046
+2021-03-27 04:05:42,006 - Model - INFO - eval mIoU of Earphone 0.701783
+2021-03-27 04:05:42,006 - Model - INFO - eval mIoU of Guitar 0.896563
+2021-03-27 04:05:42,006 - Model - INFO - eval mIoU of Knife 0.865153
+2021-03-27 04:05:42,007 - Model - INFO - eval mIoU of Lamp 0.845839
+2021-03-27 04:05:42,007 - Model - INFO - eval mIoU of Laptop 0.953822
+2021-03-27 04:05:42,007 - Model - INFO - eval mIoU of Motorbike 0.694055
+2021-03-27 04:05:42,007 - Model - INFO - eval mIoU of Mug 0.950320
+2021-03-27 04:05:42,007 - Model - INFO - eval mIoU of Pistol 0.813714
+2021-03-27 04:05:42,007 - Model - INFO - eval mIoU of Rocket 0.610268
+2021-03-27 04:05:42,007 - Model - INFO - eval mIoU of Skateboard 0.749818
+2021-03-27 04:05:42,007 - Model - INFO - eval mIoU of Table 0.826072
+2021-03-27 04:05:42,008 - Model - INFO - Epoch 55 test Accuracy: 0.941643 Class avg mIOU: 0.816009 Inctance avg mIOU: 0.847250
+2021-03-27 04:05:42,008 - Model - INFO - Save model...
+2021-03-27 04:05:42,008 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-27 04:05:42,082 - Model - INFO - Saving model....
+2021-03-27 04:05:42,083 - Model - INFO - Best accuracy is: 0.94164
+2021-03-27 04:05:42,083 - Model - INFO - Best class avg mIOU is: 0.81849
+2021-03-27 04:05:42,083 - Model - INFO - Best inctance avg mIOU is: 0.84725
+2021-03-27 04:05:42,083 - Model - INFO - Epoch 56 (56/251):
+2021-03-27 04:05:42,084 - Model - INFO - Learning rate:0.000250
+2021-03-27 04:12:17,200 - Model - INFO - Train accuracy is: 0.95070
+2021-03-27 04:13:22,984 - Model - INFO - eval mIoU of Airplane 0.816727
+2021-03-27 04:13:22,984 - Model - INFO - eval mIoU of Bag 0.798371
+2021-03-27 04:13:22,984 - Model - INFO - eval mIoU of Cap 0.852804
+2021-03-27 04:13:22,985 - Model - INFO - eval mIoU of Car 0.770524
+2021-03-27 04:13:22,985 - Model - INFO - eval mIoU of Chair 0.897116
+2021-03-27 04:13:22,985 - Model - INFO - eval mIoU of Earphone 0.743655
+2021-03-27 04:13:22,985 - Model - INFO - eval mIoU of Guitar 0.907222
+2021-03-27 04:13:22,985 - Model - INFO - eval mIoU of Knife 0.876861
+2021-03-27 04:13:22,985 - Model - INFO - eval mIoU of Lamp 0.851336
+2021-03-27 04:13:22,985 - Model - INFO - eval mIoU of Laptop 0.952358
+2021-03-27 04:13:22,985 - Model - INFO - eval mIoU of Motorbike 0.690789
+2021-03-27 04:13:22,985 - Model - INFO - eval mIoU of Mug 0.945575
+2021-03-27 04:13:22,985 - Model - INFO - eval mIoU of Pistol 0.820625
+2021-03-27 04:13:22,985 - Model - INFO - eval mIoU of Rocket 0.618435
+2021-03-27 04:13:22,985 - Model - INFO - eval mIoU of Skateboard 0.759011
+2021-03-27 04:13:22,985 - Model - INFO - eval mIoU of Table 0.824890
+2021-03-27 04:13:22,986 - Model - INFO - Epoch 56 test Accuracy: 0.940046 Class avg mIOU: 0.820394 Inctance avg mIOU: 0.848099
+2021-03-27 04:13:22,986 - Model - INFO - Save model...
+2021-03-27 04:13:22,986 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-27 04:13:23,089 - Model - INFO - Saving model....
+2021-03-27 04:13:23,089 - Model - INFO - Best accuracy is: 0.94164
+2021-03-27 04:13:23,089 - Model - INFO - Best class avg mIOU is: 0.82039
+2021-03-27 04:13:23,089 - Model - INFO - Best inctance avg mIOU is: 0.84810
+2021-03-27 04:13:23,089 - Model - INFO - Epoch 57 (57/251):
+2021-03-27 04:13:23,089 - Model - INFO - Learning rate:0.000250
+2021-03-27 04:20:06,332 - Model - INFO - Train accuracy is: 0.95057
+2021-03-27 04:21:12,671 - Model - INFO - eval mIoU of Airplane 0.809371
+2021-03-27 04:21:12,671 - Model - INFO - eval mIoU of Bag 0.768090
+2021-03-27 04:21:12,671 - Model - INFO - eval mIoU of Cap 0.853789
+2021-03-27 04:21:12,671 - Model - INFO - eval mIoU of Car 0.767378
+2021-03-27 04:21:12,671 - Model - INFO - eval mIoU of Chair 0.892233
+2021-03-27 04:21:12,671 - Model - INFO - eval mIoU of Earphone 0.755381
+2021-03-27 04:21:12,671 - Model - INFO - eval mIoU of Guitar 0.905002
+2021-03-27 04:21:12,671 - Model - INFO - eval mIoU of Knife 0.873336
+2021-03-27 04:21:12,672 - Model - INFO - eval mIoU of Lamp 0.845554
+2021-03-27 04:21:12,672 - Model - INFO - eval mIoU of Laptop 0.954032
+2021-03-27 04:21:12,672 - Model - INFO - eval mIoU of Motorbike 0.718230
+2021-03-27 04:21:12,672 - Model - INFO - eval mIoU of Mug 0.952895
+2021-03-27 04:21:12,672 - Model - INFO - eval mIoU of Pistol 0.791837
+2021-03-27 04:21:12,672 - Model - INFO - eval mIoU of Rocket 0.591354
+2021-03-27 04:21:12,672 - Model - INFO - eval mIoU of Skateboard 0.758299
+2021-03-27 04:21:12,672 - Model - INFO - eval mIoU of Table 0.811500
+2021-03-27 04:21:12,673 - Model - INFO - Epoch 57 test Accuracy: 0.939016 Class avg mIOU: 0.815518 Inctance avg mIOU: 0.841094
+2021-03-27 04:21:12,673 - Model - INFO - Best accuracy is: 0.94164
+2021-03-27 04:21:12,673 - Model - INFO - Best class avg mIOU is: 0.82039
+2021-03-27 04:21:12,673 - Model - INFO - Best inctance avg mIOU is: 0.84810
+2021-03-27 04:21:12,673 - Model - INFO - Epoch 58 (58/251):
+2021-03-27 04:21:12,673 - Model - INFO - Learning rate:0.000250
+2021-03-27 04:27:54,306 - Model - INFO - Train accuracy is: 0.95069
+2021-03-27 04:29:02,855 - Model - INFO - eval mIoU of Airplane 0.818467
+2021-03-27 04:29:02,856 - Model - INFO - eval mIoU of Bag 0.812930
+2021-03-27 04:29:02,857 - Model - INFO - eval mIoU of Cap 0.803477
+2021-03-27 04:29:02,858 - Model - INFO - eval mIoU of Car 0.766419
+2021-03-27 04:29:02,859 - Model - INFO - eval mIoU of Chair 0.900834
+2021-03-27 04:29:02,859 - Model - INFO - eval mIoU of Earphone 0.769007
+2021-03-27 04:29:02,859 - Model - INFO - eval mIoU of Guitar 0.908516
+2021-03-27 04:29:02,860 - Model - INFO - eval mIoU of Knife 0.881099
+2021-03-27 04:29:02,860 - Model - INFO - eval mIoU of Lamp 0.835712
+2021-03-27 04:29:02,861 - Model - INFO - eval mIoU of Laptop 0.953925
+2021-03-27 04:29:02,861 - Model - INFO - eval mIoU of Motorbike 0.703330
+2021-03-27 04:29:02,861 - Model - INFO - eval mIoU of Mug 0.933902
+2021-03-27 04:29:02,862 - Model - INFO - eval mIoU of Pistol 0.802995
+2021-03-27 04:29:02,862 - Model - INFO - eval mIoU of Rocket 0.613196
+2021-03-27 04:29:02,863 - Model - INFO - eval mIoU of Skateboard 0.750875
+2021-03-27 04:29:02,863 - Model - INFO - eval mIoU of Table 0.825951
+2021-03-27 04:29:02,864 - Model - INFO - Epoch 58 test Accuracy: 0.940237 Class avg mIOU: 0.817540 Inctance avg mIOU: 0.847678
+2021-03-27 04:29:02,865 - Model - INFO - Best accuracy is: 0.94164
+2021-03-27 04:29:02,865 - Model - INFO - Best class avg mIOU is: 0.82039
+2021-03-27 04:29:02,865 - Model - INFO - Best inctance avg mIOU is: 0.84810
+2021-03-27 04:29:02,866 - Model - INFO - Epoch 59 (59/251):
+2021-03-27 04:29:02,866 - Model - INFO - Learning rate:0.000250
+2021-03-27 04:35:37,574 - Model - INFO - Train accuracy is: 0.95111
+2021-03-27 04:36:45,085 - Model - INFO - eval mIoU of Airplane 0.814011
+2021-03-27 04:36:45,086 - Model - INFO - eval mIoU of Bag 0.774979
+2021-03-27 04:36:45,086 - Model - INFO - eval mIoU of Cap 0.819307
+2021-03-27 04:36:45,086 - Model - INFO - eval mIoU of Car 0.761202
+2021-03-27 04:36:45,086 - Model - INFO - eval mIoU of Chair 0.904776
+2021-03-27 04:36:45,086 - Model - INFO - eval mIoU of Earphone 0.762100
+2021-03-27 04:36:45,086 - Model - INFO - eval mIoU of Guitar 0.909036
+2021-03-27 04:36:45,086 - Model - INFO - eval mIoU of Knife 0.871917
+2021-03-27 04:36:45,086 - Model - INFO - eval mIoU of Lamp 0.842770
+2021-03-27 04:36:45,086 - Model - INFO - eval mIoU of Laptop 0.955664
+2021-03-27 04:36:45,087 - Model - INFO - eval mIoU of Motorbike 0.682841
+2021-03-27 04:36:45,087 - Model - INFO - eval mIoU of Mug 0.933836
+2021-03-27 04:36:45,087 - Model - INFO - eval mIoU of Pistol 0.813101
+2021-03-27 04:36:45,087 - Model - INFO - eval mIoU of Rocket 0.573316
+2021-03-27 04:36:45,087 - Model - INFO - eval mIoU of Skateboard 0.762830
+2021-03-27 04:36:45,087 - Model - INFO - eval mIoU of Table 0.815407
+2021-03-27 04:36:45,088 - Model - INFO - Epoch 59 test Accuracy: 0.939384 Class avg mIOU: 0.812318 Inctance avg mIOU: 0.844838
+2021-03-27 04:36:45,088 - Model - INFO - Best accuracy is: 0.94164
+2021-03-27 04:36:45,088 - Model - INFO - Best class avg mIOU is: 0.82039
+2021-03-27 04:36:45,088 - Model - INFO - Best inctance avg mIOU is: 0.84810
+2021-03-27 04:36:45,089 - Model - INFO - Epoch 60 (60/251):
+2021-03-27 04:36:45,089 - Model - INFO - Learning rate:0.000250
+2021-03-27 04:43:26,733 - Model - INFO - Train accuracy is: 0.95054
+2021-03-27 04:44:32,876 - Model - INFO - eval mIoU of Airplane 0.809559
+2021-03-27 04:44:32,876 - Model - INFO - eval mIoU of Bag 0.805091
+2021-03-27 04:44:32,877 - Model - INFO - eval mIoU of Cap 0.879405
+2021-03-27 04:44:32,877 - Model - INFO - eval mIoU of Car 0.767637
+2021-03-27 04:44:32,877 - Model - INFO - eval mIoU of Chair 0.902669
+2021-03-27 04:44:32,877 - Model - INFO - eval mIoU of Earphone 0.690938
+2021-03-27 04:44:32,877 - Model - INFO - eval mIoU of Guitar 0.903044
+2021-03-27 04:44:32,877 - Model - INFO - eval mIoU of Knife 0.876821
+2021-03-27 04:44:32,877 - Model - INFO - eval mIoU of Lamp 0.846157
+2021-03-27 04:44:32,877 - Model - INFO - eval mIoU of Laptop 0.952677
+2021-03-27 04:44:32,877 - Model - INFO - eval mIoU of Motorbike 0.707700
+2021-03-27 04:44:32,877 - Model - INFO - eval mIoU of Mug 0.952138
+2021-03-27 04:44:32,877 - Model - INFO - eval mIoU of Pistol 0.786474
+2021-03-27 04:44:32,877 - Model - INFO - eval mIoU of Rocket 0.603154
+2021-03-27 04:44:32,877 - Model - INFO - eval mIoU of Skateboard 0.759871
+2021-03-27 04:44:32,877 - Model - INFO - eval mIoU of Table 0.827650
+2021-03-27 04:44:32,877 - Model - INFO - Epoch 60 test Accuracy: 0.941733 Class avg mIOU: 0.816937 Inctance avg mIOU: 0.848214
+2021-03-27 04:44:32,878 - Model - INFO - Save model...
+2021-03-27 04:44:32,878 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-27 04:44:32,951 - Model - INFO - Saving model....
+2021-03-27 04:44:32,951 - Model - INFO - Best accuracy is: 0.94173
+2021-03-27 04:44:32,951 - Model - INFO - Best class avg mIOU is: 0.82039
+2021-03-27 04:44:32,951 - Model - INFO - Best inctance avg mIOU is: 0.84821
+2021-03-27 04:44:32,951 - Model - INFO - Epoch 61 (61/251):
+2021-03-27 04:44:32,951 - Model - INFO - Learning rate:0.000125
+2021-03-27 04:51:13,584 - Model - INFO - Train accuracy is: 0.95213
+2021-03-27 04:52:19,949 - Model - INFO - eval mIoU of Airplane 0.822503
+2021-03-27 04:52:19,950 - Model - INFO - eval mIoU of Bag 0.794249
+2021-03-27 04:52:19,950 - Model - INFO - eval mIoU of Cap 0.811173
+2021-03-27 04:52:19,950 - Model - INFO - eval mIoU of Car 0.778046
+2021-03-27 04:52:19,950 - Model - INFO - eval mIoU of Chair 0.901062
+2021-03-27 04:52:19,950 - Model - INFO - eval mIoU of Earphone 0.737113
+2021-03-27 04:52:19,950 - Model - INFO - eval mIoU of Guitar 0.907511
+2021-03-27 04:52:19,950 - Model - INFO - eval mIoU of Knife 0.876159
+2021-03-27 04:52:19,950 - Model - INFO - eval mIoU of Lamp 0.840975
+2021-03-27 04:52:19,950 - Model - INFO - eval mIoU of Laptop 0.955479
+2021-03-27 04:52:19,950 - Model - INFO - eval mIoU of Motorbike 0.702792
+2021-03-27 04:52:19,950 - Model - INFO - eval mIoU of Mug 0.945227
+2021-03-27 04:52:19,950 - Model - INFO - eval mIoU of Pistol 0.819508
+2021-03-27 04:52:19,950 - Model - INFO - eval mIoU of Rocket 0.630675
+2021-03-27 04:52:19,950 - Model - INFO - eval mIoU of Skateboard 0.755138
+2021-03-27 04:52:19,950 - Model - INFO - eval mIoU of Table 0.823879
+2021-03-27 04:52:19,951 - Model - INFO - Epoch 61 test Accuracy: 0.942808 Class avg mIOU: 0.818843 Inctance avg mIOU: 0.848911
+2021-03-27 04:52:19,951 - Model - INFO - Save model...
+2021-03-27 04:52:19,951 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-27 04:52:20,022 - Model - INFO - Saving model....
+2021-03-27 04:52:20,022 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 04:52:20,023 - Model - INFO - Best class avg mIOU is: 0.82039
+2021-03-27 04:52:20,023 - Model - INFO - Best inctance avg mIOU is: 0.84891
+2021-03-27 04:52:20,023 - Model - INFO - Epoch 62 (62/251):
+2021-03-27 04:52:20,023 - Model - INFO - Learning rate:0.000125
+2021-03-27 04:59:02,806 - Model - INFO - Train accuracy is: 0.95255
+2021-03-27 05:00:10,847 - Model - INFO - eval mIoU of Airplane 0.823837
+2021-03-27 05:00:10,847 - Model - INFO - eval mIoU of Bag 0.796665
+2021-03-27 05:00:10,848 - Model - INFO - eval mIoU of Cap 0.816373
+2021-03-27 05:00:10,848 - Model - INFO - eval mIoU of Car 0.759232
+2021-03-27 05:00:10,848 - Model - INFO - eval mIoU of Chair 0.904878
+2021-03-27 05:00:10,848 - Model - INFO - eval mIoU of Earphone 0.693029
+2021-03-27 05:00:10,848 - Model - INFO - eval mIoU of Guitar 0.904589
+2021-03-27 05:00:10,848 - Model - INFO - eval mIoU of Knife 0.863891
+2021-03-27 05:00:10,848 - Model - INFO - eval mIoU of Lamp 0.844072
+2021-03-27 05:00:10,848 - Model - INFO - eval mIoU of Laptop 0.952437
+2021-03-27 05:00:10,848 - Model - INFO - eval mIoU of Motorbike 0.712324
+2021-03-27 05:00:10,848 - Model - INFO - eval mIoU of Mug 0.944153
+2021-03-27 05:00:10,848 - Model - INFO - eval mIoU of Pistol 0.817579
+2021-03-27 05:00:10,848 - Model - INFO - eval mIoU of Rocket 0.617159
+2021-03-27 05:00:10,848 - Model - INFO - eval mIoU of Skateboard 0.750955
+2021-03-27 05:00:10,848 - Model - INFO - eval mIoU of Table 0.817937
+2021-03-27 05:00:10,849 - Model - INFO - Epoch 62 test Accuracy: 0.941325 Class avg mIOU: 0.813694 Inctance avg mIOU: 0.846775
+2021-03-27 05:00:10,849 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 05:00:10,849 - Model - INFO - Best class avg mIOU is: 0.82039
+2021-03-27 05:00:10,849 - Model - INFO - Best inctance avg mIOU is: 0.84891
+2021-03-27 05:00:10,849 - Model - INFO - Epoch 63 (63/251):
+2021-03-27 05:00:10,849 - Model - INFO - Learning rate:0.000125
+2021-03-27 05:06:50,091 - Model - INFO - Train accuracy is: 0.95279
+2021-03-27 05:07:57,685 - Model - INFO - eval mIoU of Airplane 0.803051
+2021-03-27 05:07:57,685 - Model - INFO - eval mIoU of Bag 0.787527
+2021-03-27 05:07:57,685 - Model - INFO - eval mIoU of Cap 0.846420
+2021-03-27 05:07:57,685 - Model - INFO - eval mIoU of Car 0.764706
+2021-03-27 05:07:57,686 - Model - INFO - eval mIoU of Chair 0.898895
+2021-03-27 05:07:57,686 - Model - INFO - eval mIoU of Earphone 0.709902
+2021-03-27 05:07:57,686 - Model - INFO - eval mIoU of Guitar 0.906766
+2021-03-27 05:07:57,686 - Model - INFO - eval mIoU of Knife 0.867930
+2021-03-27 05:07:57,686 - Model - INFO - eval mIoU of Lamp 0.841781
+2021-03-27 05:07:57,686 - Model - INFO - eval mIoU of Laptop 0.956261
+2021-03-27 05:07:57,686 - Model - INFO - eval mIoU of Motorbike 0.731405
+2021-03-27 05:07:57,686 - Model - INFO - eval mIoU of Mug 0.938100
+2021-03-27 05:07:57,686 - Model - INFO - eval mIoU of Pistol 0.828899
+2021-03-27 05:07:57,686 - Model - INFO - eval mIoU of Rocket 0.581318
+2021-03-27 05:07:57,686 - Model - INFO - eval mIoU of Skateboard 0.754764
+2021-03-27 05:07:57,686 - Model - INFO - eval mIoU of Table 0.821350
+2021-03-27 05:07:57,686 - Model - INFO - Epoch 63 test Accuracy: 0.941591 Class avg mIOU: 0.814942 Inctance avg mIOU: 0.844742
+2021-03-27 05:07:57,686 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 05:07:57,687 - Model - INFO - Best class avg mIOU is: 0.82039
+2021-03-27 05:07:57,687 - Model - INFO - Best inctance avg mIOU is: 0.84891
+2021-03-27 05:07:57,687 - Model - INFO - Epoch 64 (64/251):
+2021-03-27 05:07:57,687 - Model - INFO - Learning rate:0.000125
+2021-03-27 05:14:33,629 - Model - INFO - Train accuracy is: 0.95273
+2021-03-27 05:15:41,564 - Model - INFO - eval mIoU of Airplane 0.817140
+2021-03-27 05:15:41,564 - Model - INFO - eval mIoU of Bag 0.837234
+2021-03-27 05:15:41,564 - Model - INFO - eval mIoU of Cap 0.859753
+2021-03-27 05:15:41,564 - Model - INFO - eval mIoU of Car 0.770087
+2021-03-27 05:15:41,564 - Model - INFO - eval mIoU of Chair 0.903628
+2021-03-27 05:15:41,564 - Model - INFO - eval mIoU of Earphone 0.676808
+2021-03-27 05:15:41,565 - Model - INFO - eval mIoU of Guitar 0.907227
+2021-03-27 05:15:41,565 - Model - INFO - eval mIoU of Knife 0.872350
+2021-03-27 05:15:41,565 - Model - INFO - eval mIoU of Lamp 0.832028
+2021-03-27 05:15:41,565 - Model - INFO - eval mIoU of Laptop 0.952456
+2021-03-27 05:15:41,565 - Model - INFO - eval mIoU of Motorbike 0.707030
+2021-03-27 05:15:41,565 - Model - INFO - eval mIoU of Mug 0.936491
+2021-03-27 05:15:41,565 - Model - INFO - eval mIoU of Pistol 0.821073
+2021-03-27 05:15:41,565 - Model - INFO - eval mIoU of Rocket 0.569474
+2021-03-27 05:15:41,565 - Model - INFO - eval mIoU of Skateboard 0.764982
+2021-03-27 05:15:41,565 - Model - INFO - eval mIoU of Table 0.826624
+2021-03-27 05:15:41,566 - Model - INFO - Epoch 64 test Accuracy: 0.940073 Class avg mIOU: 0.815899 Inctance avg mIOU: 0.848112
+2021-03-27 05:15:41,566 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 05:15:41,566 - Model - INFO - Best class avg mIOU is: 0.82039
+2021-03-27 05:15:41,566 - Model - INFO - Best inctance avg mIOU is: 0.84891
+2021-03-27 05:15:41,566 - Model - INFO - Epoch 65 (65/251):
+2021-03-27 05:15:41,566 - Model - INFO - Learning rate:0.000125
+2021-03-27 05:22:31,143 - Model - INFO - Train accuracy is: 0.95297
+2021-03-27 05:23:39,650 - Model - INFO - eval mIoU of Airplane 0.829589
+2021-03-27 05:23:39,650 - Model - INFO - eval mIoU of Bag 0.824532
+2021-03-27 05:23:39,651 - Model - INFO - eval mIoU of Cap 0.843584
+2021-03-27 05:23:39,651 - Model - INFO - eval mIoU of Car 0.782330
+2021-03-27 05:23:39,651 - Model - INFO - eval mIoU of Chair 0.906435
+2021-03-27 05:23:39,651 - Model - INFO - eval mIoU of Earphone 0.755102
+2021-03-27 05:23:39,651 - Model - INFO - eval mIoU of Guitar 0.909842
+2021-03-27 05:23:39,651 - Model - INFO - eval mIoU of Knife 0.864096
+2021-03-27 05:23:39,651 - Model - INFO - eval mIoU of Lamp 0.845510
+2021-03-27 05:23:39,651 - Model - INFO - eval mIoU of Laptop 0.951368
+2021-03-27 05:23:39,651 - Model - INFO - eval mIoU of Motorbike 0.683127
+2021-03-27 05:23:39,651 - Model - INFO - eval mIoU of Mug 0.947693
+2021-03-27 05:23:39,651 - Model - INFO - eval mIoU of Pistol 0.809116
+2021-03-27 05:23:39,651 - Model - INFO - eval mIoU of Rocket 0.583871
+2021-03-27 05:23:39,651 - Model - INFO - eval mIoU of Skateboard 0.768516
+2021-03-27 05:23:39,651 - Model - INFO - eval mIoU of Table 0.823471
+2021-03-27 05:23:39,652 - Model - INFO - Epoch 65 test Accuracy: 0.942812 Class avg mIOU: 0.820511 Inctance avg mIOU: 0.851142
+2021-03-27 05:23:39,652 - Model - INFO - Save model...
+2021-03-27 05:23:39,652 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-27 05:23:39,729 - Model - INFO - Saving model....
+2021-03-27 05:23:39,729 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 05:23:39,729 - Model - INFO - Best class avg mIOU is: 0.82051
+2021-03-27 05:23:39,729 - Model - INFO - Best inctance avg mIOU is: 0.85114
+2021-03-27 05:23:39,730 - Model - INFO - Epoch 66 (66/251):
+2021-03-27 05:23:39,730 - Model - INFO - Learning rate:0.000125
+2021-03-27 05:30:22,613 - Model - INFO - Train accuracy is: 0.95285
+2021-03-27 05:31:28,213 - Model - INFO - eval mIoU of Airplane 0.811840
+2021-03-27 05:31:28,214 - Model - INFO - eval mIoU of Bag 0.807263
+2021-03-27 05:31:28,214 - Model - INFO - eval mIoU of Cap 0.853696
+2021-03-27 05:31:28,214 - Model - INFO - eval mIoU of Car 0.775692
+2021-03-27 05:31:28,215 - Model - INFO - eval mIoU of Chair 0.907610
+2021-03-27 05:31:28,215 - Model - INFO - eval mIoU of Earphone 0.734897
+2021-03-27 05:31:28,215 - Model - INFO - eval mIoU of Guitar 0.909289
+2021-03-27 05:31:28,215 - Model - INFO - eval mIoU of Knife 0.866202
+2021-03-27 05:31:28,215 - Model - INFO - eval mIoU of Lamp 0.848111
+2021-03-27 05:31:28,215 - Model - INFO - eval mIoU of Laptop 0.954721
+2021-03-27 05:31:28,215 - Model - INFO - eval mIoU of Motorbike 0.726164
+2021-03-27 05:31:28,216 - Model - INFO - eval mIoU of Mug 0.949439
+2021-03-27 05:31:28,216 - Model - INFO - eval mIoU of Pistol 0.820021
+2021-03-27 05:31:28,216 - Model - INFO - eval mIoU of Rocket 0.603297
+2021-03-27 05:31:28,216 - Model - INFO - eval mIoU of Skateboard 0.751400
+2021-03-27 05:31:28,216 - Model - INFO - eval mIoU of Table 0.823653
+2021-03-27 05:31:28,217 - Model - INFO - Epoch 66 test Accuracy: 0.941182 Class avg mIOU: 0.821456 Inctance avg mIOU: 0.850102
+2021-03-27 05:31:28,217 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 05:31:28,217 - Model - INFO - Best class avg mIOU is: 0.82146
+2021-03-27 05:31:28,217 - Model - INFO - Best inctance avg mIOU is: 0.85114
+2021-03-27 05:31:28,217 - Model - INFO - Epoch 67 (67/251):
+2021-03-27 05:31:28,217 - Model - INFO - Learning rate:0.000125
+2021-03-27 05:38:10,220 - Model - INFO - Train accuracy is: 0.95305
+2021-03-27 05:39:19,725 - Model - INFO - eval mIoU of Airplane 0.822756
+2021-03-27 05:39:19,725 - Model - INFO - eval mIoU of Bag 0.765644
+2021-03-27 05:39:19,726 - Model - INFO - eval mIoU of Cap 0.828786
+2021-03-27 05:39:19,726 - Model - INFO - eval mIoU of Car 0.772249
+2021-03-27 05:39:19,726 - Model - INFO - eval mIoU of Chair 0.902506
+2021-03-27 05:39:19,726 - Model - INFO - eval mIoU of Earphone 0.731356
+2021-03-27 05:39:19,726 - Model - INFO - eval mIoU of Guitar 0.904644
+2021-03-27 05:39:19,726 - Model - INFO - eval mIoU of Knife 0.871413
+2021-03-27 05:39:19,726 - Model - INFO - eval mIoU of Lamp 0.849365
+2021-03-27 05:39:19,726 - Model - INFO - eval mIoU of Laptop 0.950361
+2021-03-27 05:39:19,726 - Model - INFO - eval mIoU of Motorbike 0.689654
+2021-03-27 05:39:19,726 - Model - INFO - eval mIoU of Mug 0.953168
+2021-03-27 05:39:19,726 - Model - INFO - eval mIoU of Pistol 0.823450
+2021-03-27 05:39:19,726 - Model - INFO - eval mIoU of Rocket 0.595233
+2021-03-27 05:39:19,726 - Model - INFO - eval mIoU of Skateboard 0.765461
+2021-03-27 05:39:19,726 - Model - INFO - eval mIoU of Table 0.823721
+2021-03-27 05:39:19,727 - Model - INFO - Epoch 67 test Accuracy: 0.941533 Class avg mIOU: 0.815610 Inctance avg mIOU: 0.849122
+2021-03-27 05:39:19,727 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 05:39:19,727 - Model - INFO - Best class avg mIOU is: 0.82146
+2021-03-27 05:39:19,727 - Model - INFO - Best inctance avg mIOU is: 0.85114
+2021-03-27 05:39:19,727 - Model - INFO - Epoch 68 (68/251):
+2021-03-27 05:39:19,728 - Model - INFO - Learning rate:0.000125
+2021-03-27 05:46:04,603 - Model - INFO - Train accuracy is: 0.95352
+2021-03-27 05:47:11,296 - Model - INFO - eval mIoU of Airplane 0.825281
+2021-03-27 05:47:11,297 - Model - INFO - eval mIoU of Bag 0.797653
+2021-03-27 05:47:11,297 - Model - INFO - eval mIoU of Cap 0.850524
+2021-03-27 05:47:11,297 - Model - INFO - eval mIoU of Car 0.760411
+2021-03-27 05:47:11,297 - Model - INFO - eval mIoU of Chair 0.905746
+2021-03-27 05:47:11,297 - Model - INFO - eval mIoU of Earphone 0.744410
+2021-03-27 05:47:11,297 - Model - INFO - eval mIoU of Guitar 0.904189
+2021-03-27 05:47:11,297 - Model - INFO - eval mIoU of Knife 0.872449
+2021-03-27 05:47:11,297 - Model - INFO - eval mIoU of Lamp 0.849841
+2021-03-27 05:47:11,297 - Model - INFO - eval mIoU of Laptop 0.949483
+2021-03-27 05:47:11,297 - Model - INFO - eval mIoU of Motorbike 0.702218
+2021-03-27 05:47:11,297 - Model - INFO - eval mIoU of Mug 0.933788
+2021-03-27 05:47:11,297 - Model - INFO - eval mIoU of Pistol 0.827438
+2021-03-27 05:47:11,297 - Model - INFO - eval mIoU of Rocket 0.568061
+2021-03-27 05:47:11,297 - Model - INFO - eval mIoU of Skateboard 0.764376
+2021-03-27 05:47:11,297 - Model - INFO - eval mIoU of Table 0.821218
+2021-03-27 05:47:11,298 - Model - INFO - Epoch 68 test Accuracy: 0.941158 Class avg mIOU: 0.817318 Inctance avg mIOU: 0.849057
+2021-03-27 05:47:11,298 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 05:47:11,298 - Model - INFO - Best class avg mIOU is: 0.82146
+2021-03-27 05:47:11,298 - Model - INFO - Best inctance avg mIOU is: 0.85114
+2021-03-27 05:47:11,298 - Model - INFO - Epoch 69 (69/251):
+2021-03-27 05:47:11,298 - Model - INFO - Learning rate:0.000125
+2021-03-27 05:53:48,838 - Model - INFO - Train accuracy is: 0.95288
+2021-03-27 05:54:56,402 - Model - INFO - eval mIoU of Airplane 0.822699
+2021-03-27 05:54:56,402 - Model - INFO - eval mIoU of Bag 0.805097
+2021-03-27 05:54:56,402 - Model - INFO - eval mIoU of Cap 0.845431
+2021-03-27 05:54:56,402 - Model - INFO - eval mIoU of Car 0.774310
+2021-03-27 05:54:56,402 - Model - INFO - eval mIoU of Chair 0.903160
+2021-03-27 05:54:56,402 - Model - INFO - eval mIoU of Earphone 0.653158
+2021-03-27 05:54:56,403 - Model - INFO - eval mIoU of Guitar 0.904717
+2021-03-27 05:54:56,403 - Model - INFO - eval mIoU of Knife 0.861641
+2021-03-27 05:54:56,403 - Model - INFO - eval mIoU of Lamp 0.844460
+2021-03-27 05:54:56,403 - Model - INFO - eval mIoU of Laptop 0.951813
+2021-03-27 05:54:56,403 - Model - INFO - eval mIoU of Motorbike 0.711065
+2021-03-27 05:54:56,403 - Model - INFO - eval mIoU of Mug 0.955255
+2021-03-27 05:54:56,403 - Model - INFO - eval mIoU of Pistol 0.822414
+2021-03-27 05:54:56,403 - Model - INFO - eval mIoU of Rocket 0.615620
+2021-03-27 05:54:56,403 - Model - INFO - eval mIoU of Skateboard 0.758891
+2021-03-27 05:54:56,403 - Model - INFO - eval mIoU of Table 0.826571
+2021-03-27 05:54:56,403 - Model - INFO - Epoch 69 test Accuracy: 0.941147 Class avg mIOU: 0.816019 Inctance avg mIOU: 0.849796
+2021-03-27 05:54:56,403 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 05:54:56,403 - Model - INFO - Best class avg mIOU is: 0.82146
+2021-03-27 05:54:56,403 - Model - INFO - Best inctance avg mIOU is: 0.85114
+2021-03-27 05:54:56,403 - Model - INFO - Epoch 70 (70/251):
+2021-03-27 05:54:56,404 - Model - INFO - Learning rate:0.000125
+2021-03-27 06:01:36,526 - Model - INFO - Train accuracy is: 0.95335
+2021-03-27 06:02:45,074 - Model - INFO - eval mIoU of Airplane 0.821981
+2021-03-27 06:02:45,074 - Model - INFO - eval mIoU of Bag 0.805649
+2021-03-27 06:02:45,074 - Model - INFO - eval mIoU of Cap 0.864244
+2021-03-27 06:02:45,074 - Model - INFO - eval mIoU of Car 0.758073
+2021-03-27 06:02:45,074 - Model - INFO - eval mIoU of Chair 0.901936
+2021-03-27 06:02:45,074 - Model - INFO - eval mIoU of Earphone 0.704343
+2021-03-27 06:02:45,074 - Model - INFO - eval mIoU of Guitar 0.909434
+2021-03-27 06:02:45,074 - Model - INFO - eval mIoU of Knife 0.873716
+2021-03-27 06:02:45,074 - Model - INFO - eval mIoU of Lamp 0.842102
+2021-03-27 06:02:45,074 - Model - INFO - eval mIoU of Laptop 0.953898
+2021-03-27 06:02:45,074 - Model - INFO - eval mIoU of Motorbike 0.701986
+2021-03-27 06:02:45,074 - Model - INFO - eval mIoU of Mug 0.945636
+2021-03-27 06:02:45,074 - Model - INFO - eval mIoU of Pistol 0.823102
+2021-03-27 06:02:45,074 - Model - INFO - eval mIoU of Rocket 0.619397
+2021-03-27 06:02:45,074 - Model - INFO - eval mIoU of Skateboard 0.758292
+2021-03-27 06:02:45,075 - Model - INFO - eval mIoU of Table 0.826358
+2021-03-27 06:02:45,075 - Model - INFO - Epoch 70 test Accuracy: 0.942403 Class avg mIOU: 0.819384 Inctance avg mIOU: 0.848934
+2021-03-27 06:02:45,075 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 06:02:45,075 - Model - INFO - Best class avg mIOU is: 0.82146
+2021-03-27 06:02:45,075 - Model - INFO - Best inctance avg mIOU is: 0.85114
+2021-03-27 06:02:45,075 - Model - INFO - Epoch 71 (71/251):
+2021-03-27 06:02:45,075 - Model - INFO - Learning rate:0.000125
+2021-03-27 06:09:31,125 - Model - INFO - Train accuracy is: 0.95307
+2021-03-27 06:10:38,343 - Model - INFO - eval mIoU of Airplane 0.826336
+2021-03-27 06:10:38,344 - Model - INFO - eval mIoU of Bag 0.803349
+2021-03-27 06:10:38,344 - Model - INFO - eval mIoU of Cap 0.862131
+2021-03-27 06:10:38,344 - Model - INFO - eval mIoU of Car 0.770807
+2021-03-27 06:10:38,344 - Model - INFO - eval mIoU of Chair 0.901528
+2021-03-27 06:10:38,344 - Model - INFO - eval mIoU of Earphone 0.741946
+2021-03-27 06:10:38,344 - Model - INFO - eval mIoU of Guitar 0.909548
+2021-03-27 06:10:38,345 - Model - INFO - eval mIoU of Knife 0.872717
+2021-03-27 06:10:38,345 - Model - INFO - eval mIoU of Lamp 0.843121
+2021-03-27 06:10:38,345 - Model - INFO - eval mIoU of Laptop 0.952199
+2021-03-27 06:10:38,345 - Model - INFO - eval mIoU of Motorbike 0.697188
+2021-03-27 06:10:38,345 - Model - INFO - eval mIoU of Mug 0.938213
+2021-03-27 06:10:38,345 - Model - INFO - eval mIoU of Pistol 0.834230
+2021-03-27 06:10:38,345 - Model - INFO - eval mIoU of Rocket 0.611881
+2021-03-27 06:10:38,345 - Model - INFO - eval mIoU of Skateboard 0.757109
+2021-03-27 06:10:38,345 - Model - INFO - eval mIoU of Table 0.823867
+2021-03-27 06:10:38,346 - Model - INFO - Epoch 71 test Accuracy: 0.941764 Class avg mIOU: 0.821636 Inctance avg mIOU: 0.849453
+2021-03-27 06:10:38,346 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 06:10:38,346 - Model - INFO - Best class avg mIOU is: 0.82164
+2021-03-27 06:10:38,346 - Model - INFO - Best inctance avg mIOU is: 0.85114
+2021-03-27 06:10:38,346 - Model - INFO - Epoch 72 (72/251):
+2021-03-27 06:10:38,346 - Model - INFO - Learning rate:0.000125
+2021-03-27 06:17:22,623 - Model - INFO - Train accuracy is: 0.95327
+2021-03-27 06:18:31,048 - Model - INFO - eval mIoU of Airplane 0.811287
+2021-03-27 06:18:31,048 - Model - INFO - eval mIoU of Bag 0.810969
+2021-03-27 06:18:31,048 - Model - INFO - eval mIoU of Cap 0.851811
+2021-03-27 06:18:31,048 - Model - INFO - eval mIoU of Car 0.773610
+2021-03-27 06:18:31,048 - Model - INFO - eval mIoU of Chair 0.900754
+2021-03-27 06:18:31,048 - Model - INFO - eval mIoU of Earphone 0.681475
+2021-03-27 06:18:31,049 - Model - INFO - eval mIoU of Guitar 0.912840
+2021-03-27 06:18:31,049 - Model - INFO - eval mIoU of Knife 0.872022
+2021-03-27 06:18:31,049 - Model - INFO - eval mIoU of Lamp 0.844736
+2021-03-27 06:18:31,049 - Model - INFO - eval mIoU of Laptop 0.954894
+2021-03-27 06:18:31,049 - Model - INFO - eval mIoU of Motorbike 0.689794
+2021-03-27 06:18:31,049 - Model - INFO - eval mIoU of Mug 0.929088
+2021-03-27 06:18:31,049 - Model - INFO - eval mIoU of Pistol 0.823035
+2021-03-27 06:18:31,049 - Model - INFO - eval mIoU of Rocket 0.585372
+2021-03-27 06:18:31,049 - Model - INFO - eval mIoU of Skateboard 0.760176
+2021-03-27 06:18:31,049 - Model - INFO - eval mIoU of Table 0.816790
+2021-03-27 06:18:31,050 - Model - INFO - Epoch 72 test Accuracy: 0.941472 Class avg mIOU: 0.813666 Inctance avg mIOU: 0.845148
+2021-03-27 06:18:31,050 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 06:18:31,050 - Model - INFO - Best class avg mIOU is: 0.82164
+2021-03-27 06:18:31,050 - Model - INFO - Best inctance avg mIOU is: 0.85114
+2021-03-27 06:18:31,050 - Model - INFO - Epoch 73 (73/251):
+2021-03-27 06:18:31,050 - Model - INFO - Learning rate:0.000125
+2021-03-27 06:25:12,431 - Model - INFO - Train accuracy is: 0.95313
+2021-03-27 06:26:20,737 - Model - INFO - eval mIoU of Airplane 0.830256
+2021-03-27 06:26:20,738 - Model - INFO - eval mIoU of Bag 0.810151
+2021-03-27 06:26:20,738 - Model - INFO - eval mIoU of Cap 0.879232
+2021-03-27 06:26:20,738 - Model - INFO - eval mIoU of Car 0.768950
+2021-03-27 06:26:20,738 - Model - INFO - eval mIoU of Chair 0.906471
+2021-03-27 06:26:20,738 - Model - INFO - eval mIoU of Earphone 0.718440
+2021-03-27 06:26:20,738 - Model - INFO - eval mIoU of Guitar 0.909905
+2021-03-27 06:26:20,738 - Model - INFO - eval mIoU of Knife 0.871889
+2021-03-27 06:26:20,738 - Model - INFO - eval mIoU of Lamp 0.843473
+2021-03-27 06:26:20,738 - Model - INFO - eval mIoU of Laptop 0.953884
+2021-03-27 06:26:20,738 - Model - INFO - eval mIoU of Motorbike 0.714038
+2021-03-27 06:26:20,738 - Model - INFO - eval mIoU of Mug 0.934742
+2021-03-27 06:26:20,738 - Model - INFO - eval mIoU of Pistol 0.813415
+2021-03-27 06:26:20,738 - Model - INFO - eval mIoU of Rocket 0.561156
+2021-03-27 06:26:20,739 - Model - INFO - eval mIoU of Skateboard 0.757572
+2021-03-27 06:26:20,739 - Model - INFO - eval mIoU of Table 0.818335
+2021-03-27 06:26:20,739 - Model - INFO - Epoch 73 test Accuracy: 0.941269 Class avg mIOU: 0.818244 Inctance avg mIOU: 0.849187
+2021-03-27 06:26:20,739 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 06:26:20,739 - Model - INFO - Best class avg mIOU is: 0.82164
+2021-03-27 06:26:20,740 - Model - INFO - Best inctance avg mIOU is: 0.85114
+2021-03-27 06:26:20,740 - Model - INFO - Epoch 74 (74/251):
+2021-03-27 06:26:20,740 - Model - INFO - Learning rate:0.000125
+2021-03-27 06:33:02,879 - Model - INFO - Train accuracy is: 0.95350
+2021-03-27 06:34:10,690 - Model - INFO - eval mIoU of Airplane 0.825204
+2021-03-27 06:34:10,691 - Model - INFO - eval mIoU of Bag 0.808293
+2021-03-27 06:34:10,691 - Model - INFO - eval mIoU of Cap 0.847800
+2021-03-27 06:34:10,691 - Model - INFO - eval mIoU of Car 0.776786
+2021-03-27 06:34:10,691 - Model - INFO - eval mIoU of Chair 0.902725
+2021-03-27 06:34:10,691 - Model - INFO - eval mIoU of Earphone 0.698575
+2021-03-27 06:34:10,691 - Model - INFO - eval mIoU of Guitar 0.909540
+2021-03-27 06:34:10,691 - Model - INFO - eval mIoU of Knife 0.871126
+2021-03-27 06:34:10,691 - Model - INFO - eval mIoU of Lamp 0.845433
+2021-03-27 06:34:10,691 - Model - INFO - eval mIoU of Laptop 0.953927
+2021-03-27 06:34:10,691 - Model - INFO - eval mIoU of Motorbike 0.725270
+2021-03-27 06:34:10,691 - Model - INFO - eval mIoU of Mug 0.942024
+2021-03-27 06:34:10,691 - Model - INFO - eval mIoU of Pistol 0.821088
+2021-03-27 06:34:10,691 - Model - INFO - eval mIoU of Rocket 0.571939
+2021-03-27 06:34:10,691 - Model - INFO - eval mIoU of Skateboard 0.765910
+2021-03-27 06:34:10,691 - Model - INFO - eval mIoU of Table 0.827791
+2021-03-27 06:34:10,692 - Model - INFO - Epoch 74 test Accuracy: 0.942648 Class avg mIOU: 0.818339 Inctance avg mIOU: 0.851367
+2021-03-27 06:34:10,692 - Model - INFO - Save model...
+2021-03-27 06:34:10,692 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-27 06:34:10,779 - Model - INFO - Saving model....
+2021-03-27 06:34:10,779 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 06:34:10,779 - Model - INFO - Best class avg mIOU is: 0.82164
+2021-03-27 06:34:10,779 - Model - INFO - Best inctance avg mIOU is: 0.85137
+2021-03-27 06:34:10,779 - Model - INFO - Epoch 75 (75/251):
+2021-03-27 06:34:10,780 - Model - INFO - Learning rate:0.000125
+2021-03-27 06:40:53,616 - Model - INFO - Train accuracy is: 0.95341
+2021-03-27 06:42:00,637 - Model - INFO - eval mIoU of Airplane 0.833076
+2021-03-27 06:42:00,637 - Model - INFO - eval mIoU of Bag 0.797242
+2021-03-27 06:42:00,637 - Model - INFO - eval mIoU of Cap 0.864157
+2021-03-27 06:42:00,638 - Model - INFO - eval mIoU of Car 0.774036
+2021-03-27 06:42:00,638 - Model - INFO - eval mIoU of Chair 0.907035
+2021-03-27 06:42:00,638 - Model - INFO - eval mIoU of Earphone 0.687701
+2021-03-27 06:42:00,638 - Model - INFO - eval mIoU of Guitar 0.914526
+2021-03-27 06:42:00,638 - Model - INFO - eval mIoU of Knife 0.865397
+2021-03-27 06:42:00,638 - Model - INFO - eval mIoU of Lamp 0.843775
+2021-03-27 06:42:00,638 - Model - INFO - eval mIoU of Laptop 0.954631
+2021-03-27 06:42:00,638 - Model - INFO - eval mIoU of Motorbike 0.714269
+2021-03-27 06:42:00,638 - Model - INFO - eval mIoU of Mug 0.950809
+2021-03-27 06:42:00,638 - Model - INFO - eval mIoU of Pistol 0.816798
+2021-03-27 06:42:00,638 - Model - INFO - eval mIoU of Rocket 0.631232
+2021-03-27 06:42:00,638 - Model - INFO - eval mIoU of Skateboard 0.765213
+2021-03-27 06:42:00,638 - Model - INFO - eval mIoU of Table 0.826256
+2021-03-27 06:42:00,638 - Model - INFO - Epoch 75 test Accuracy: 0.942021 Class avg mIOU: 0.821635 Inctance avg mIOU: 0.852776
+2021-03-27 06:42:00,639 - Model - INFO - Save model...
+2021-03-27 06:42:00,639 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-27 06:42:00,721 - Model - INFO - Saving model....
+2021-03-27 06:42:00,722 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 06:42:00,722 - Model - INFO - Best class avg mIOU is: 0.82164
+2021-03-27 06:42:00,722 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 06:42:00,722 - Model - INFO - Epoch 76 (76/251):
+2021-03-27 06:42:00,722 - Model - INFO - Learning rate:0.000125
+2021-03-27 06:48:37,725 - Model - INFO - Train accuracy is: 0.95358
+2021-03-27 06:49:43,809 - Model - INFO - eval mIoU of Airplane 0.810542
+2021-03-27 06:49:43,810 - Model - INFO - eval mIoU of Bag 0.810344
+2021-03-27 06:49:43,810 - Model - INFO - eval mIoU of Cap 0.821111
+2021-03-27 06:49:43,810 - Model - INFO - eval mIoU of Car 0.775024
+2021-03-27 06:49:43,810 - Model - INFO - eval mIoU of Chair 0.901638
+2021-03-27 06:49:43,810 - Model - INFO - eval mIoU of Earphone 0.723193
+2021-03-27 06:49:43,810 - Model - INFO - eval mIoU of Guitar 0.910782
+2021-03-27 06:49:43,810 - Model - INFO - eval mIoU of Knife 0.865741
+2021-03-27 06:49:43,810 - Model - INFO - eval mIoU of Lamp 0.843412
+2021-03-27 06:49:43,810 - Model - INFO - eval mIoU of Laptop 0.954598
+2021-03-27 06:49:43,810 - Model - INFO - eval mIoU of Motorbike 0.664122
+2021-03-27 06:49:43,810 - Model - INFO - eval mIoU of Mug 0.936943
+2021-03-27 06:49:43,810 - Model - INFO - eval mIoU of Pistol 0.831253
+2021-03-27 06:49:43,810 - Model - INFO - eval mIoU of Rocket 0.580673
+2021-03-27 06:49:43,810 - Model - INFO - eval mIoU of Skateboard 0.756904
+2021-03-27 06:49:43,810 - Model - INFO - eval mIoU of Table 0.828653
+2021-03-27 06:49:43,811 - Model - INFO - Epoch 76 test Accuracy: 0.941631 Class avg mIOU: 0.813433 Inctance avg mIOU: 0.848227
+2021-03-27 06:49:43,811 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 06:49:43,811 - Model - INFO - Best class avg mIOU is: 0.82164
+2021-03-27 06:49:43,811 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 06:49:43,811 - Model - INFO - Epoch 77 (77/251):
+2021-03-27 06:49:43,811 - Model - INFO - Learning rate:0.000125
+2021-03-27 06:56:23,101 - Model - INFO - Train accuracy is: 0.95351
+2021-03-27 06:57:31,416 - Model - INFO - eval mIoU of Airplane 0.817841
+2021-03-27 06:57:31,416 - Model - INFO - eval mIoU of Bag 0.834952
+2021-03-27 06:57:31,416 - Model - INFO - eval mIoU of Cap 0.858842
+2021-03-27 06:57:31,417 - Model - INFO - eval mIoU of Car 0.779359
+2021-03-27 06:57:31,417 - Model - INFO - eval mIoU of Chair 0.903983
+2021-03-27 06:57:31,417 - Model - INFO - eval mIoU of Earphone 0.680273
+2021-03-27 06:57:31,417 - Model - INFO - eval mIoU of Guitar 0.912599
+2021-03-27 06:57:31,417 - Model - INFO - eval mIoU of Knife 0.868335
+2021-03-27 06:57:31,417 - Model - INFO - eval mIoU of Lamp 0.846165
+2021-03-27 06:57:31,417 - Model - INFO - eval mIoU of Laptop 0.954753
+2021-03-27 06:57:31,417 - Model - INFO - eval mIoU of Motorbike 0.695926
+2021-03-27 06:57:31,417 - Model - INFO - eval mIoU of Mug 0.952320
+2021-03-27 06:57:31,417 - Model - INFO - eval mIoU of Pistol 0.830167
+2021-03-27 06:57:31,417 - Model - INFO - eval mIoU of Rocket 0.597646
+2021-03-27 06:57:31,417 - Model - INFO - eval mIoU of Skateboard 0.758155
+2021-03-27 06:57:31,417 - Model - INFO - eval mIoU of Table 0.825921
+2021-03-27 06:57:31,417 - Model - INFO - Epoch 77 test Accuracy: 0.942602 Class avg mIOU: 0.819827 Inctance avg mIOU: 0.850441
+2021-03-27 06:57:31,417 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 06:57:31,417 - Model - INFO - Best class avg mIOU is: 0.82164
+2021-03-27 06:57:31,417 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 06:57:31,418 - Model - INFO - Epoch 78 (78/251):
+2021-03-27 06:57:31,418 - Model - INFO - Learning rate:0.000125
+2021-03-27 07:04:18,205 - Model - INFO - Train accuracy is: 0.95344
+2021-03-27 07:05:28,034 - Model - INFO - eval mIoU of Airplane 0.822395
+2021-03-27 07:05:28,034 - Model - INFO - eval mIoU of Bag 0.835353
+2021-03-27 07:05:28,034 - Model - INFO - eval mIoU of Cap 0.848199
+2021-03-27 07:05:28,034 - Model - INFO - eval mIoU of Car 0.782854
+2021-03-27 07:05:28,034 - Model - INFO - eval mIoU of Chair 0.902481
+2021-03-27 07:05:28,034 - Model - INFO - eval mIoU of Earphone 0.697528
+2021-03-27 07:05:28,035 - Model - INFO - eval mIoU of Guitar 0.911604
+2021-03-27 07:05:28,035 - Model - INFO - eval mIoU of Knife 0.877170
+2021-03-27 07:05:28,035 - Model - INFO - eval mIoU of Lamp 0.846803
+2021-03-27 07:05:28,035 - Model - INFO - eval mIoU of Laptop 0.954899
+2021-03-27 07:05:28,035 - Model - INFO - eval mIoU of Motorbike 0.712406
+2021-03-27 07:05:28,035 - Model - INFO - eval mIoU of Mug 0.936288
+2021-03-27 07:05:28,035 - Model - INFO - eval mIoU of Pistol 0.805104
+2021-03-27 07:05:28,035 - Model - INFO - eval mIoU of Rocket 0.526143
+2021-03-27 07:05:28,035 - Model - INFO - eval mIoU of Skateboard 0.764751
+2021-03-27 07:05:28,035 - Model - INFO - eval mIoU of Table 0.828060
+2021-03-27 07:05:28,035 - Model - INFO - Epoch 78 test Accuracy: 0.942599 Class avg mIOU: 0.815752 Inctance avg mIOU: 0.851210
+2021-03-27 07:05:28,035 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 07:05:28,035 - Model - INFO - Best class avg mIOU is: 0.82164
+2021-03-27 07:05:28,035 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 07:05:28,036 - Model - INFO - Epoch 79 (79/251):
+2021-03-27 07:05:28,036 - Model - INFO - Learning rate:0.000125
+2021-03-27 07:12:08,598 - Model - INFO - Train accuracy is: 0.95347
+2021-03-27 07:13:14,954 - Model - INFO - eval mIoU of Airplane 0.814376
+2021-03-27 07:13:14,955 - Model - INFO - eval mIoU of Bag 0.798647
+2021-03-27 07:13:14,955 - Model - INFO - eval mIoU of Cap 0.805883
+2021-03-27 07:13:14,955 - Model - INFO - eval mIoU of Car 0.752191
+2021-03-27 07:13:14,955 - Model - INFO - eval mIoU of Chair 0.902481
+2021-03-27 07:13:14,955 - Model - INFO - eval mIoU of Earphone 0.733584
+2021-03-27 07:13:14,955 - Model - INFO - eval mIoU of Guitar 0.905251
+2021-03-27 07:13:14,955 - Model - INFO - eval mIoU of Knife 0.874251
+2021-03-27 07:13:14,956 - Model - INFO - eval mIoU of Lamp 0.828267
+2021-03-27 07:13:14,956 - Model - INFO - eval mIoU of Laptop 0.952465
+2021-03-27 07:13:14,956 - Model - INFO - eval mIoU of Motorbike 0.704365
+2021-03-27 07:13:14,956 - Model - INFO - eval mIoU of Mug 0.941518
+2021-03-27 07:13:14,956 - Model - INFO - eval mIoU of Pistol 0.808402
+2021-03-27 07:13:14,956 - Model - INFO - eval mIoU of Rocket 0.607885
+2021-03-27 07:13:14,956 - Model - INFO - eval mIoU of Skateboard 0.762233
+2021-03-27 07:13:14,956 - Model - INFO - eval mIoU of Table 0.817535
+2021-03-27 07:13:14,957 - Model - INFO - Epoch 79 test Accuracy: 0.939265 Class avg mIOU: 0.813083 Inctance avg mIOU: 0.843246
+2021-03-27 07:13:14,957 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 07:13:14,957 - Model - INFO - Best class avg mIOU is: 0.82164
+2021-03-27 07:13:14,957 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 07:13:14,957 - Model - INFO - Epoch 80 (80/251):
+2021-03-27 07:13:14,957 - Model - INFO - Learning rate:0.000125
+2021-03-27 07:20:01,585 - Model - INFO - Train accuracy is: 0.95361
+2021-03-27 07:21:09,062 - Model - INFO - eval mIoU of Airplane 0.810732
+2021-03-27 07:21:09,063 - Model - INFO - eval mIoU of Bag 0.810216
+2021-03-27 07:21:09,063 - Model - INFO - eval mIoU of Cap 0.829931
+2021-03-27 07:21:09,063 - Model - INFO - eval mIoU of Car 0.749789
+2021-03-27 07:21:09,063 - Model - INFO - eval mIoU of Chair 0.900530
+2021-03-27 07:21:09,063 - Model - INFO - eval mIoU of Earphone 0.730916
+2021-03-27 07:21:09,063 - Model - INFO - eval mIoU of Guitar 0.911790
+2021-03-27 07:21:09,063 - Model - INFO - eval mIoU of Knife 0.870390
+2021-03-27 07:21:09,063 - Model - INFO - eval mIoU of Lamp 0.840857
+2021-03-27 07:21:09,064 - Model - INFO - eval mIoU of Laptop 0.954802
+2021-03-27 07:21:09,064 - Model - INFO - eval mIoU of Motorbike 0.715663
+2021-03-27 07:21:09,064 - Model - INFO - eval mIoU of Mug 0.930950
+2021-03-27 07:21:09,064 - Model - INFO - eval mIoU of Pistol 0.808431
+2021-03-27 07:21:09,064 - Model - INFO - eval mIoU of Rocket 0.590333
+2021-03-27 07:21:09,064 - Model - INFO - eval mIoU of Skateboard 0.760398
+2021-03-27 07:21:09,064 - Model - INFO - eval mIoU of Table 0.820152
+2021-03-27 07:21:09,064 - Model - INFO - Epoch 80 test Accuracy: 0.941214 Class avg mIOU: 0.814743 Inctance avg mIOU: 0.844654
+2021-03-27 07:21:09,064 - Model - INFO - Best accuracy is: 0.94281
+2021-03-27 07:21:09,064 - Model - INFO - Best class avg mIOU is: 0.82164
+2021-03-27 07:21:09,065 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 07:21:09,065 - Model - INFO - Epoch 81 (81/251):
+2021-03-27 07:21:09,065 - Model - INFO - Learning rate:0.000063
+2021-03-27 07:27:51,150 - Model - INFO - Train accuracy is: 0.95436
+2021-03-27 07:29:00,233 - Model - INFO - eval mIoU of Airplane 0.819029
+2021-03-27 07:29:00,233 - Model - INFO - eval mIoU of Bag 0.846507
+2021-03-27 07:29:00,233 - Model - INFO - eval mIoU of Cap 0.858299
+2021-03-27 07:29:00,233 - Model - INFO - eval mIoU of Car 0.770296
+2021-03-27 07:29:00,234 - Model - INFO - eval mIoU of Chair 0.906075
+2021-03-27 07:29:00,234 - Model - INFO - eval mIoU of Earphone 0.730905
+2021-03-27 07:29:00,234 - Model - INFO - eval mIoU of Guitar 0.900189
+2021-03-27 07:29:00,234 - Model - INFO - eval mIoU of Knife 0.874061
+2021-03-27 07:29:00,234 - Model - INFO - eval mIoU of Lamp 0.855879
+2021-03-27 07:29:00,234 - Model - INFO - eval mIoU of Laptop 0.954763
+2021-03-27 07:29:00,234 - Model - INFO - eval mIoU of Motorbike 0.734419
+2021-03-27 07:29:00,234 - Model - INFO - eval mIoU of Mug 0.936797
+2021-03-27 07:29:00,234 - Model - INFO - eval mIoU of Pistol 0.816643
+2021-03-27 07:29:00,234 - Model - INFO - eval mIoU of Rocket 0.583793
+2021-03-27 07:29:00,234 - Model - INFO - eval mIoU of Skateboard 0.762145
+2021-03-27 07:29:00,234 - Model - INFO - eval mIoU of Table 0.823251
+2021-03-27 07:29:00,234 - Model - INFO - Epoch 81 test Accuracy: 0.943194 Class avg mIOU: 0.823316 Inctance avg mIOU: 0.850805
+2021-03-27 07:29:00,234 - Model - INFO - Best accuracy is: 0.94319
+2021-03-27 07:29:00,235 - Model - INFO - Best class avg mIOU is: 0.82332
+2021-03-27 07:29:00,235 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 07:29:00,235 - Model - INFO - Epoch 82 (82/251):
+2021-03-27 07:29:00,235 - Model - INFO - Learning rate:0.000063
+2021-03-27 07:35:36,740 - Model - INFO - Train accuracy is: 0.95482
+2021-03-27 07:36:45,537 - Model - INFO - eval mIoU of Airplane 0.818196
+2021-03-27 07:36:45,537 - Model - INFO - eval mIoU of Bag 0.827139
+2021-03-27 07:36:45,537 - Model - INFO - eval mIoU of Cap 0.850448
+2021-03-27 07:36:45,537 - Model - INFO - eval mIoU of Car 0.777643
+2021-03-27 07:36:45,537 - Model - INFO - eval mIoU of Chair 0.904765
+2021-03-27 07:36:45,538 - Model - INFO - eval mIoU of Earphone 0.729892
+2021-03-27 07:36:45,538 - Model - INFO - eval mIoU of Guitar 0.909426
+2021-03-27 07:36:45,538 - Model - INFO - eval mIoU of Knife 0.874226
+2021-03-27 07:36:45,538 - Model - INFO - eval mIoU of Lamp 0.842402
+2021-03-27 07:36:45,538 - Model - INFO - eval mIoU of Laptop 0.955803
+2021-03-27 07:36:45,538 - Model - INFO - eval mIoU of Motorbike 0.714825
+2021-03-27 07:36:45,538 - Model - INFO - eval mIoU of Mug 0.951457
+2021-03-27 07:36:45,538 - Model - INFO - eval mIoU of Pistol 0.819033
+2021-03-27 07:36:45,538 - Model - INFO - eval mIoU of Rocket 0.593776
+2021-03-27 07:36:45,538 - Model - INFO - eval mIoU of Skateboard 0.762296
+2021-03-27 07:36:45,538 - Model - INFO - eval mIoU of Table 0.827941
+2021-03-27 07:36:45,538 - Model - INFO - Epoch 82 test Accuracy: 0.942723 Class avg mIOU: 0.822454 Inctance avg mIOU: 0.851175
+2021-03-27 07:36:45,539 - Model - INFO - Best accuracy is: 0.94319
+2021-03-27 07:36:45,539 - Model - INFO - Best class avg mIOU is: 0.82332
+2021-03-27 07:36:45,539 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 07:36:45,539 - Model - INFO - Epoch 83 (83/251):
+2021-03-27 07:36:45,539 - Model - INFO - Learning rate:0.000063
+2021-03-27 07:43:27,479 - Model - INFO - Train accuracy is: 0.95481
+2021-03-27 07:44:36,294 - Model - INFO - eval mIoU of Airplane 0.819901
+2021-03-27 07:44:36,295 - Model - INFO - eval mIoU of Bag 0.826995
+2021-03-27 07:44:36,295 - Model - INFO - eval mIoU of Cap 0.841050
+2021-03-27 07:44:36,295 - Model - INFO - eval mIoU of Car 0.773892
+2021-03-27 07:44:36,295 - Model - INFO - eval mIoU of Chair 0.901170
+2021-03-27 07:44:36,295 - Model - INFO - eval mIoU of Earphone 0.695897
+2021-03-27 07:44:36,295 - Model - INFO - eval mIoU of Guitar 0.908067
+2021-03-27 07:44:36,295 - Model - INFO - eval mIoU of Knife 0.870620
+2021-03-27 07:44:36,295 - Model - INFO - eval mIoU of Lamp 0.843056
+2021-03-27 07:44:36,295 - Model - INFO - eval mIoU of Laptop 0.953480
+2021-03-27 07:44:36,295 - Model - INFO - eval mIoU of Motorbike 0.712040
+2021-03-27 07:44:36,296 - Model - INFO - eval mIoU of Mug 0.949745
+2021-03-27 07:44:36,296 - Model - INFO - eval mIoU of Pistol 0.825606
+2021-03-27 07:44:36,296 - Model - INFO - eval mIoU of Rocket 0.610574
+2021-03-27 07:44:36,296 - Model - INFO - eval mIoU of Skateboard 0.756492
+2021-03-27 07:44:36,296 - Model - INFO - eval mIoU of Table 0.821834
+2021-03-27 07:44:36,296 - Model - INFO - Epoch 83 test Accuracy: 0.942191 Class avg mIOU: 0.819401 Inctance avg mIOU: 0.848144
+2021-03-27 07:44:36,296 - Model - INFO - Best accuracy is: 0.94319
+2021-03-27 07:44:36,297 - Model - INFO - Best class avg mIOU is: 0.82332
+2021-03-27 07:44:36,297 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 07:44:36,297 - Model - INFO - Epoch 84 (84/251):
+2021-03-27 07:44:36,297 - Model - INFO - Learning rate:0.000063
+2021-03-27 07:51:13,368 - Model - INFO - Train accuracy is: 0.95487
+2021-03-27 07:52:19,752 - Model - INFO - eval mIoU of Airplane 0.821694
+2021-03-27 07:52:19,752 - Model - INFO - eval mIoU of Bag 0.815332
+2021-03-27 07:52:19,752 - Model - INFO - eval mIoU of Cap 0.849524
+2021-03-27 07:52:19,752 - Model - INFO - eval mIoU of Car 0.772298
+2021-03-27 07:52:19,752 - Model - INFO - eval mIoU of Chair 0.903557
+2021-03-27 07:52:19,752 - Model - INFO - eval mIoU of Earphone 0.685984
+2021-03-27 07:52:19,752 - Model - INFO - eval mIoU of Guitar 0.911942
+2021-03-27 07:52:19,752 - Model - INFO - eval mIoU of Knife 0.869775
+2021-03-27 07:52:19,753 - Model - INFO - eval mIoU of Lamp 0.847322
+2021-03-27 07:52:19,753 - Model - INFO - eval mIoU of Laptop 0.952343
+2021-03-27 07:52:19,753 - Model - INFO - eval mIoU of Motorbike 0.723675
+2021-03-27 07:52:19,753 - Model - INFO - eval mIoU of Mug 0.937510
+2021-03-27 07:52:19,753 - Model - INFO - eval mIoU of Pistol 0.817792
+2021-03-27 07:52:19,753 - Model - INFO - eval mIoU of Rocket 0.589734
+2021-03-27 07:52:19,753 - Model - INFO - eval mIoU of Skateboard 0.756621
+2021-03-27 07:52:19,753 - Model - INFO - eval mIoU of Table 0.826275
+2021-03-27 07:52:19,753 - Model - INFO - Epoch 84 test Accuracy: 0.943155 Class avg mIOU: 0.817586 Inctance avg mIOU: 0.850514
+2021-03-27 07:52:19,753 - Model - INFO - Best accuracy is: 0.94319
+2021-03-27 07:52:19,753 - Model - INFO - Best class avg mIOU is: 0.82332
+2021-03-27 07:52:19,753 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 07:52:19,754 - Model - INFO - Epoch 85 (85/251):
+2021-03-27 07:52:19,754 - Model - INFO - Learning rate:0.000063
+2021-03-27 07:59:09,083 - Model - INFO - Train accuracy is: 0.95517
+2021-03-27 08:00:17,678 - Model - INFO - eval mIoU of Airplane 0.820701
+2021-03-27 08:00:17,679 - Model - INFO - eval mIoU of Bag 0.811203
+2021-03-27 08:00:17,679 - Model - INFO - eval mIoU of Cap 0.854854
+2021-03-27 08:00:17,679 - Model - INFO - eval mIoU of Car 0.778660
+2021-03-27 08:00:17,679 - Model - INFO - eval mIoU of Chair 0.905884
+2021-03-27 08:00:17,679 - Model - INFO - eval mIoU of Earphone 0.730516
+2021-03-27 08:00:17,679 - Model - INFO - eval mIoU of Guitar 0.908500
+2021-03-27 08:00:17,679 - Model - INFO - eval mIoU of Knife 0.868193
+2021-03-27 08:00:17,679 - Model - INFO - eval mIoU of Lamp 0.851085
+2021-03-27 08:00:17,679 - Model - INFO - eval mIoU of Laptop 0.953594
+2021-03-27 08:00:17,679 - Model - INFO - eval mIoU of Motorbike 0.721575
+2021-03-27 08:00:17,679 - Model - INFO - eval mIoU of Mug 0.939219
+2021-03-27 08:00:17,679 - Model - INFO - eval mIoU of Pistol 0.824219
+2021-03-27 08:00:17,679 - Model - INFO - eval mIoU of Rocket 0.605739
+2021-03-27 08:00:17,679 - Model - INFO - eval mIoU of Skateboard 0.761224
+2021-03-27 08:00:17,679 - Model - INFO - eval mIoU of Table 0.827886
+2021-03-27 08:00:17,680 - Model - INFO - Epoch 85 test Accuracy: 0.942981 Class avg mIOU: 0.822691 Inctance avg mIOU: 0.852384
+2021-03-27 08:00:17,680 - Model - INFO - Best accuracy is: 0.94319
+2021-03-27 08:00:17,680 - Model - INFO - Best class avg mIOU is: 0.82332
+2021-03-27 08:00:17,680 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 08:00:17,680 - Model - INFO - Epoch 86 (86/251):
+2021-03-27 08:00:17,680 - Model - INFO - Learning rate:0.000063
+2021-03-27 08:07:01,478 - Model - INFO - Train accuracy is: 0.95505
+2021-03-27 08:08:10,558 - Model - INFO - eval mIoU of Airplane 0.821425
+2021-03-27 08:08:10,559 - Model - INFO - eval mIoU of Bag 0.816650
+2021-03-27 08:08:10,559 - Model - INFO - eval mIoU of Cap 0.858563
+2021-03-27 08:08:10,559 - Model - INFO - eval mIoU of Car 0.769943
+2021-03-27 08:08:10,559 - Model - INFO - eval mIoU of Chair 0.903339
+2021-03-27 08:08:10,559 - Model - INFO - eval mIoU of Earphone 0.689639
+2021-03-27 08:08:10,559 - Model - INFO - eval mIoU of Guitar 0.908525
+2021-03-27 08:08:10,559 - Model - INFO - eval mIoU of Knife 0.866073
+2021-03-27 08:08:10,559 - Model - INFO - eval mIoU of Lamp 0.843200
+2021-03-27 08:08:10,559 - Model - INFO - eval mIoU of Laptop 0.955644
+2021-03-27 08:08:10,559 - Model - INFO - eval mIoU of Motorbike 0.710542
+2021-03-27 08:08:10,559 - Model - INFO - eval mIoU of Mug 0.938292
+2021-03-27 08:08:10,559 - Model - INFO - eval mIoU of Pistol 0.810245
+2021-03-27 08:08:10,559 - Model - INFO - eval mIoU of Rocket 0.592115
+2021-03-27 08:08:10,559 - Model - INFO - eval mIoU of Skateboard 0.764862
+2021-03-27 08:08:10,559 - Model - INFO - eval mIoU of Table 0.828427
+2021-03-27 08:08:10,560 - Model - INFO - Epoch 86 test Accuracy: 0.942421 Class avg mIOU: 0.817343 Inctance avg mIOU: 0.850146
+2021-03-27 08:08:10,560 - Model - INFO - Best accuracy is: 0.94319
+2021-03-27 08:08:10,560 - Model - INFO - Best class avg mIOU is: 0.82332
+2021-03-27 08:08:10,560 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 08:08:10,560 - Model - INFO - Epoch 87 (87/251):
+2021-03-27 08:08:10,560 - Model - INFO - Learning rate:0.000063
+2021-03-27 08:14:50,654 - Model - INFO - Train accuracy is: 0.95507
+2021-03-27 08:15:58,830 - Model - INFO - eval mIoU of Airplane 0.822102
+2021-03-27 08:15:58,830 - Model - INFO - eval mIoU of Bag 0.790079
+2021-03-27 08:15:58,830 - Model - INFO - eval mIoU of Cap 0.855325
+2021-03-27 08:15:58,830 - Model - INFO - eval mIoU of Car 0.773067
+2021-03-27 08:15:58,830 - Model - INFO - eval mIoU of Chair 0.903686
+2021-03-27 08:15:58,830 - Model - INFO - eval mIoU of Earphone 0.703512
+2021-03-27 08:15:58,830 - Model - INFO - eval mIoU of Guitar 0.907275
+2021-03-27 08:15:58,831 - Model - INFO - eval mIoU of Knife 0.877941
+2021-03-27 08:15:58,831 - Model - INFO - eval mIoU of Lamp 0.849218
+2021-03-27 08:15:58,831 - Model - INFO - eval mIoU of Laptop 0.954435
+2021-03-27 08:15:58,831 - Model - INFO - eval mIoU of Motorbike 0.723945
+2021-03-27 08:15:58,831 - Model - INFO - eval mIoU of Mug 0.957847
+2021-03-27 08:15:58,831 - Model - INFO - eval mIoU of Pistol 0.821950
+2021-03-27 08:15:58,831 - Model - INFO - eval mIoU of Rocket 0.596976
+2021-03-27 08:15:58,831 - Model - INFO - eval mIoU of Skateboard 0.756623
+2021-03-27 08:15:58,831 - Model - INFO - eval mIoU of Table 0.824903
+2021-03-27 08:15:58,831 - Model - INFO - Epoch 87 test Accuracy: 0.942789 Class avg mIOU: 0.819930 Inctance avg mIOU: 0.850802
+2021-03-27 08:15:58,831 - Model - INFO - Best accuracy is: 0.94319
+2021-03-27 08:15:58,831 - Model - INFO - Best class avg mIOU is: 0.82332
+2021-03-27 08:15:58,831 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 08:15:58,832 - Model - INFO - Epoch 88 (88/251):
+2021-03-27 08:15:58,832 - Model - INFO - Learning rate:0.000063
+2021-03-27 08:22:37,185 - Model - INFO - Train accuracy is: 0.95508
+2021-03-27 08:23:43,778 - Model - INFO - eval mIoU of Airplane 0.826364
+2021-03-27 08:23:43,778 - Model - INFO - eval mIoU of Bag 0.835314
+2021-03-27 08:23:43,778 - Model - INFO - eval mIoU of Cap 0.808961
+2021-03-27 08:23:43,778 - Model - INFO - eval mIoU of Car 0.774469
+2021-03-27 08:23:43,778 - Model - INFO - eval mIoU of Chair 0.906985
+2021-03-27 08:23:43,778 - Model - INFO - eval mIoU of Earphone 0.701580
+2021-03-27 08:23:43,778 - Model - INFO - eval mIoU of Guitar 0.907158
+2021-03-27 08:23:43,778 - Model - INFO - eval mIoU of Knife 0.871028
+2021-03-27 08:23:43,778 - Model - INFO - eval mIoU of Lamp 0.848345
+2021-03-27 08:23:43,778 - Model - INFO - eval mIoU of Laptop 0.953473
+2021-03-27 08:23:43,779 - Model - INFO - eval mIoU of Motorbike 0.719170
+2021-03-27 08:23:43,779 - Model - INFO - eval mIoU of Mug 0.952881
+2021-03-27 08:23:43,779 - Model - INFO - eval mIoU of Pistol 0.829298
+2021-03-27 08:23:43,779 - Model - INFO - eval mIoU of Rocket 0.595161
+2021-03-27 08:23:43,779 - Model - INFO - eval mIoU of Skateboard 0.760668
+2021-03-27 08:23:43,779 - Model - INFO - eval mIoU of Table 0.825335
+2021-03-27 08:23:43,779 - Model - INFO - Epoch 88 test Accuracy: 0.942444 Class avg mIOU: 0.819762 Inctance avg mIOU: 0.852038
+2021-03-27 08:23:43,779 - Model - INFO - Best accuracy is: 0.94319
+2021-03-27 08:23:43,779 - Model - INFO - Best class avg mIOU is: 0.82332
+2021-03-27 08:23:43,779 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 08:23:43,779 - Model - INFO - Epoch 89 (89/251):
+2021-03-27 08:23:43,779 - Model - INFO - Learning rate:0.000063
+2021-03-27 08:30:27,820 - Model - INFO - Train accuracy is: 0.95521
+2021-03-27 08:31:34,763 - Model - INFO - eval mIoU of Airplane 0.827847
+2021-03-27 08:31:34,764 - Model - INFO - eval mIoU of Bag 0.818288
+2021-03-27 08:31:34,764 - Model - INFO - eval mIoU of Cap 0.866676
+2021-03-27 08:31:34,764 - Model - INFO - eval mIoU of Car 0.771113
+2021-03-27 08:31:34,764 - Model - INFO - eval mIoU of Chair 0.906148
+2021-03-27 08:31:34,764 - Model - INFO - eval mIoU of Earphone 0.692666
+2021-03-27 08:31:34,764 - Model - INFO - eval mIoU of Guitar 0.908491
+2021-03-27 08:31:34,764 - Model - INFO - eval mIoU of Knife 0.875272
+2021-03-27 08:31:34,764 - Model - INFO - eval mIoU of Lamp 0.836115
+2021-03-27 08:31:34,765 - Model - INFO - eval mIoU of Laptop 0.952510
+2021-03-27 08:31:34,765 - Model - INFO - eval mIoU of Motorbike 0.722109
+2021-03-27 08:31:34,765 - Model - INFO - eval mIoU of Mug 0.950295
+2021-03-27 08:31:34,765 - Model - INFO - eval mIoU of Pistol 0.813165
+2021-03-27 08:31:34,765 - Model - INFO - eval mIoU of Rocket 0.612954
+2021-03-27 08:31:34,765 - Model - INFO - eval mIoU of Skateboard 0.766464
+2021-03-27 08:31:34,765 - Model - INFO - eval mIoU of Table 0.829546
+2021-03-27 08:31:34,766 - Model - INFO - Epoch 89 test Accuracy: 0.942694 Class avg mIOU: 0.821854 Inctance avg mIOU: 0.852016
+2021-03-27 08:31:34,766 - Model - INFO - Best accuracy is: 0.94319
+2021-03-27 08:31:34,766 - Model - INFO - Best class avg mIOU is: 0.82332
+2021-03-27 08:31:34,766 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 08:31:34,766 - Model - INFO - Epoch 90 (90/251):
+2021-03-27 08:31:34,766 - Model - INFO - Learning rate:0.000063
+2021-03-27 08:38:20,795 - Model - INFO - Train accuracy is: 0.95527
+2021-03-27 08:39:28,458 - Model - INFO - eval mIoU of Airplane 0.825929
+2021-03-27 08:39:28,458 - Model - INFO - eval mIoU of Bag 0.812837
+2021-03-27 08:39:28,458 - Model - INFO - eval mIoU of Cap 0.858925
+2021-03-27 08:39:28,458 - Model - INFO - eval mIoU of Car 0.777061
+2021-03-27 08:39:28,458 - Model - INFO - eval mIoU of Chair 0.906232
+2021-03-27 08:39:28,458 - Model - INFO - eval mIoU of Earphone 0.730288
+2021-03-27 08:39:28,458 - Model - INFO - eval mIoU of Guitar 0.909917
+2021-03-27 08:39:28,458 - Model - INFO - eval mIoU of Knife 0.872366
+2021-03-27 08:39:28,459 - Model - INFO - eval mIoU of Lamp 0.843252
+2021-03-27 08:39:28,459 - Model - INFO - eval mIoU of Laptop 0.954089
+2021-03-27 08:39:28,459 - Model - INFO - eval mIoU of Motorbike 0.719311
+2021-03-27 08:39:28,459 - Model - INFO - eval mIoU of Mug 0.948359
+2021-03-27 08:39:28,459 - Model - INFO - eval mIoU of Pistol 0.818199
+2021-03-27 08:39:28,459 - Model - INFO - eval mIoU of Rocket 0.617630
+2021-03-27 08:39:28,459 - Model - INFO - eval mIoU of Skateboard 0.760808
+2021-03-27 08:39:28,459 - Model - INFO - eval mIoU of Table 0.821889
+2021-03-27 08:39:28,459 - Model - INFO - Epoch 90 test Accuracy: 0.942776 Class avg mIOU: 0.823568 Inctance avg mIOU: 0.850718
+2021-03-27 08:39:28,459 - Model - INFO - Best accuracy is: 0.94319
+2021-03-27 08:39:28,459 - Model - INFO - Best class avg mIOU is: 0.82357
+2021-03-27 08:39:28,459 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 08:39:28,460 - Model - INFO - Epoch 91 (91/251):
+2021-03-27 08:39:28,460 - Model - INFO - Learning rate:0.000063
+2021-03-27 08:46:04,593 - Model - INFO - Train accuracy is: 0.95513
+2021-03-27 08:47:11,970 - Model - INFO - eval mIoU of Airplane 0.828464
+2021-03-27 08:47:11,970 - Model - INFO - eval mIoU of Bag 0.816129
+2021-03-27 08:47:11,971 - Model - INFO - eval mIoU of Cap 0.850153
+2021-03-27 08:47:11,971 - Model - INFO - eval mIoU of Car 0.771390
+2021-03-27 08:47:11,971 - Model - INFO - eval mIoU of Chair 0.904884
+2021-03-27 08:47:11,971 - Model - INFO - eval mIoU of Earphone 0.671827
+2021-03-27 08:47:11,971 - Model - INFO - eval mIoU of Guitar 0.908786
+2021-03-27 08:47:11,971 - Model - INFO - eval mIoU of Knife 0.868445
+2021-03-27 08:47:11,971 - Model - INFO - eval mIoU of Lamp 0.846280
+2021-03-27 08:47:11,971 - Model - INFO - eval mIoU of Laptop 0.955255
+2021-03-27 08:47:11,971 - Model - INFO - eval mIoU of Motorbike 0.717218
+2021-03-27 08:47:11,971 - Model - INFO - eval mIoU of Mug 0.949481
+2021-03-27 08:47:11,971 - Model - INFO - eval mIoU of Pistol 0.813020
+2021-03-27 08:47:11,971 - Model - INFO - eval mIoU of Rocket 0.598130
+2021-03-27 08:47:11,971 - Model - INFO - eval mIoU of Skateboard 0.758369
+2021-03-27 08:47:11,971 - Model - INFO - eval mIoU of Table 0.828384
+2021-03-27 08:47:11,972 - Model - INFO - Epoch 91 test Accuracy: 0.943048 Class avg mIOU: 0.817888 Inctance avg mIOU: 0.851945
+2021-03-27 08:47:11,972 - Model - INFO - Best accuracy is: 0.94319
+2021-03-27 08:47:11,972 - Model - INFO - Best class avg mIOU is: 0.82357
+2021-03-27 08:47:11,972 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 08:47:11,972 - Model - INFO - Epoch 92 (92/251):
+2021-03-27 08:47:11,972 - Model - INFO - Learning rate:0.000063
+2021-03-27 08:53:52,620 - Model - INFO - Train accuracy is: 0.95519
+2021-03-27 08:55:00,996 - Model - INFO - eval mIoU of Airplane 0.822648
+2021-03-27 08:55:00,998 - Model - INFO - eval mIoU of Bag 0.796805
+2021-03-27 08:55:00,999 - Model - INFO - eval mIoU of Cap 0.853916
+2021-03-27 08:55:00,999 - Model - INFO - eval mIoU of Car 0.776219
+2021-03-27 08:55:00,999 - Model - INFO - eval mIoU of Chair 0.903053
+2021-03-27 08:55:00,999 - Model - INFO - eval mIoU of Earphone 0.733580
+2021-03-27 08:55:00,999 - Model - INFO - eval mIoU of Guitar 0.909274
+2021-03-27 08:55:00,999 - Model - INFO - eval mIoU of Knife 0.869629
+2021-03-27 08:55:00,999 - Model - INFO - eval mIoU of Lamp 0.842581
+2021-03-27 08:55:00,999 - Model - INFO - eval mIoU of Laptop 0.950722
+2021-03-27 08:55:00,999 - Model - INFO - eval mIoU of Motorbike 0.708929
+2021-03-27 08:55:00,999 - Model - INFO - eval mIoU of Mug 0.957613
+2021-03-27 08:55:01,000 - Model - INFO - eval mIoU of Pistol 0.808980
+2021-03-27 08:55:01,000 - Model - INFO - eval mIoU of Rocket 0.609491
+2021-03-27 08:55:01,000 - Model - INFO - eval mIoU of Skateboard 0.768142
+2021-03-27 08:55:01,000 - Model - INFO - eval mIoU of Table 0.825801
+2021-03-27 08:55:01,001 - Model - INFO - Epoch 92 test Accuracy: 0.942295 Class avg mIOU: 0.821086 Inctance avg mIOU: 0.850143
+2021-03-27 08:55:01,001 - Model - INFO - Best accuracy is: 0.94319
+2021-03-27 08:55:01,001 - Model - INFO - Best class avg mIOU is: 0.82357
+2021-03-27 08:55:01,001 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 08:55:01,001 - Model - INFO - Epoch 93 (93/251):
+2021-03-27 08:55:01,001 - Model - INFO - Learning rate:0.000063
+2021-03-27 09:01:48,004 - Model - INFO - Train accuracy is: 0.95494
+2021-03-27 09:02:54,341 - Model - INFO - eval mIoU of Airplane 0.819023
+2021-03-27 09:02:54,341 - Model - INFO - eval mIoU of Bag 0.804206
+2021-03-27 09:02:54,341 - Model - INFO - eval mIoU of Cap 0.848604
+2021-03-27 09:02:54,341 - Model - INFO - eval mIoU of Car 0.773988
+2021-03-27 09:02:54,341 - Model - INFO - eval mIoU of Chair 0.903514
+2021-03-27 09:02:54,342 - Model - INFO - eval mIoU of Earphone 0.702972
+2021-03-27 09:02:54,342 - Model - INFO - eval mIoU of Guitar 0.911755
+2021-03-27 09:02:54,342 - Model - INFO - eval mIoU of Knife 0.874676
+2021-03-27 09:02:54,342 - Model - INFO - eval mIoU of Lamp 0.834705
+2021-03-27 09:02:54,342 - Model - INFO - eval mIoU of Laptop 0.952018
+2021-03-27 09:02:54,342 - Model - INFO - eval mIoU of Motorbike 0.733551
+2021-03-27 09:02:54,342 - Model - INFO - eval mIoU of Mug 0.946958
+2021-03-27 09:02:54,342 - Model - INFO - eval mIoU of Pistol 0.826287
+2021-03-27 09:02:54,342 - Model - INFO - eval mIoU of Rocket 0.598064
+2021-03-27 09:02:54,342 - Model - INFO - eval mIoU of Skateboard 0.768286
+2021-03-27 09:02:54,342 - Model - INFO - eval mIoU of Table 0.831434
+2021-03-27 09:02:54,343 - Model - INFO - Epoch 93 test Accuracy: 0.943465 Class avg mIOU: 0.820628 Inctance avg mIOU: 0.851278
+2021-03-27 09:02:54,343 - Model - INFO - Best accuracy is: 0.94347
+2021-03-27 09:02:54,343 - Model - INFO - Best class avg mIOU is: 0.82357
+2021-03-27 09:02:54,343 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 09:02:54,343 - Model - INFO - Epoch 94 (94/251):
+2021-03-27 09:02:54,343 - Model - INFO - Learning rate:0.000063
+2021-03-27 09:09:31,891 - Model - INFO - Train accuracy is: 0.95549
+2021-03-27 09:10:38,745 - Model - INFO - eval mIoU of Airplane 0.830880
+2021-03-27 09:10:38,746 - Model - INFO - eval mIoU of Bag 0.816998
+2021-03-27 09:10:38,746 - Model - INFO - eval mIoU of Cap 0.815639
+2021-03-27 09:10:38,746 - Model - INFO - eval mIoU of Car 0.777625
+2021-03-27 09:10:38,746 - Model - INFO - eval mIoU of Chair 0.904604
+2021-03-27 09:10:38,746 - Model - INFO - eval mIoU of Earphone 0.662304
+2021-03-27 09:10:38,746 - Model - INFO - eval mIoU of Guitar 0.913737
+2021-03-27 09:10:38,746 - Model - INFO - eval mIoU of Knife 0.866592
+2021-03-27 09:10:38,746 - Model - INFO - eval mIoU of Lamp 0.843575
+2021-03-27 09:10:38,746 - Model - INFO - eval mIoU of Laptop 0.956251
+2021-03-27 09:10:38,746 - Model - INFO - eval mIoU of Motorbike 0.698271
+2021-03-27 09:10:38,746 - Model - INFO - eval mIoU of Mug 0.950779
+2021-03-27 09:10:38,746 - Model - INFO - eval mIoU of Pistol 0.833049
+2021-03-27 09:10:38,746 - Model - INFO - eval mIoU of Rocket 0.594427
+2021-03-27 09:10:38,746 - Model - INFO - eval mIoU of Skateboard 0.763471
+2021-03-27 09:10:38,746 - Model - INFO - eval mIoU of Table 0.828476
+2021-03-27 09:10:38,746 - Model - INFO - Epoch 94 test Accuracy: 0.943077 Class avg mIOU: 0.816042 Inctance avg mIOU: 0.852368
+2021-03-27 09:10:38,746 - Model - INFO - Best accuracy is: 0.94347
+2021-03-27 09:10:38,746 - Model - INFO - Best class avg mIOU is: 0.82357
+2021-03-27 09:10:38,747 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 09:10:38,747 - Model - INFO - Epoch 95 (95/251):
+2021-03-27 09:10:38,747 - Model - INFO - Learning rate:0.000063
+2021-03-27 09:17:20,225 - Model - INFO - Train accuracy is: 0.95530
+2021-03-27 09:18:29,321 - Model - INFO - eval mIoU of Airplane 0.824260
+2021-03-27 09:18:29,322 - Model - INFO - eval mIoU of Bag 0.838880
+2021-03-27 09:18:29,322 - Model - INFO - eval mIoU of Cap 0.828275
+2021-03-27 09:18:29,322 - Model - INFO - eval mIoU of Car 0.767907
+2021-03-27 09:18:29,322 - Model - INFO - eval mIoU of Chair 0.907018
+2021-03-27 09:18:29,322 - Model - INFO - eval mIoU of Earphone 0.740106
+2021-03-27 09:18:29,322 - Model - INFO - eval mIoU of Guitar 0.912006
+2021-03-27 09:18:29,322 - Model - INFO - eval mIoU of Knife 0.872745
+2021-03-27 09:18:29,322 - Model - INFO - eval mIoU of Lamp 0.845132
+2021-03-27 09:18:29,322 - Model - INFO - eval mIoU of Laptop 0.955396
+2021-03-27 09:18:29,322 - Model - INFO - eval mIoU of Motorbike 0.704572
+2021-03-27 09:18:29,322 - Model - INFO - eval mIoU of Mug 0.955338
+2021-03-27 09:18:29,322 - Model - INFO - eval mIoU of Pistol 0.828322
+2021-03-27 09:18:29,322 - Model - INFO - eval mIoU of Rocket 0.609072
+2021-03-27 09:18:29,322 - Model - INFO - eval mIoU of Skateboard 0.759401
+2021-03-27 09:18:29,322 - Model - INFO - eval mIoU of Table 0.822643
+2021-03-27 09:18:29,323 - Model - INFO - Epoch 95 test Accuracy: 0.942549 Class avg mIOU: 0.823192 Inctance avg mIOU: 0.850775
+2021-03-27 09:18:29,323 - Model - INFO - Best accuracy is: 0.94347
+2021-03-27 09:18:29,323 - Model - INFO - Best class avg mIOU is: 0.82357
+2021-03-27 09:18:29,323 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 09:18:29,323 - Model - INFO - Epoch 96 (96/251):
+2021-03-27 09:18:29,323 - Model - INFO - Learning rate:0.000063
+2021-03-27 09:25:06,746 - Model - INFO - Train accuracy is: 0.95513
+2021-03-27 09:26:10,275 - Model - INFO - eval mIoU of Airplane 0.823796
+2021-03-27 09:26:10,276 - Model - INFO - eval mIoU of Bag 0.825110
+2021-03-27 09:26:10,276 - Model - INFO - eval mIoU of Cap 0.869870
+2021-03-27 09:26:10,276 - Model - INFO - eval mIoU of Car 0.775232
+2021-03-27 09:26:10,276 - Model - INFO - eval mIoU of Chair 0.904619
+2021-03-27 09:26:10,276 - Model - INFO - eval mIoU of Earphone 0.733311
+2021-03-27 09:26:10,276 - Model - INFO - eval mIoU of Guitar 0.909269
+2021-03-27 09:26:10,276 - Model - INFO - eval mIoU of Knife 0.868375
+2021-03-27 09:26:10,276 - Model - INFO - eval mIoU of Lamp 0.843610
+2021-03-27 09:26:10,276 - Model - INFO - eval mIoU of Laptop 0.955438
+2021-03-27 09:26:10,276 - Model - INFO - eval mIoU of Motorbike 0.721782
+2021-03-27 09:26:10,276 - Model - INFO - eval mIoU of Mug 0.936900
+2021-03-27 09:26:10,276 - Model - INFO - eval mIoU of Pistol 0.827788
+2021-03-27 09:26:10,276 - Model - INFO - eval mIoU of Rocket 0.574547
+2021-03-27 09:26:10,276 - Model - INFO - eval mIoU of Skateboard 0.748696
+2021-03-27 09:26:10,276 - Model - INFO - eval mIoU of Table 0.829900
+2021-03-27 09:26:10,277 - Model - INFO - Epoch 96 test Accuracy: 0.942471 Class avg mIOU: 0.821765 Inctance avg mIOU: 0.852106
+2021-03-27 09:26:10,277 - Model - INFO - Best accuracy is: 0.94347
+2021-03-27 09:26:10,277 - Model - INFO - Best class avg mIOU is: 0.82357
+2021-03-27 09:26:10,277 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 09:26:10,277 - Model - INFO - Epoch 97 (97/251):
+2021-03-27 09:26:10,277 - Model - INFO - Learning rate:0.000063
+2021-03-27 09:32:38,606 - Model - INFO - Train accuracy is: 0.95541
+2021-03-27 09:33:43,654 - Model - INFO - eval mIoU of Airplane 0.826753
+2021-03-27 09:33:43,654 - Model - INFO - eval mIoU of Bag 0.760667
+2021-03-27 09:33:43,654 - Model - INFO - eval mIoU of Cap 0.845315
+2021-03-27 09:33:43,654 - Model - INFO - eval mIoU of Car 0.758823
+2021-03-27 09:33:43,655 - Model - INFO - eval mIoU of Chair 0.903205
+2021-03-27 09:33:43,655 - Model - INFO - eval mIoU of Earphone 0.698205
+2021-03-27 09:33:43,655 - Model - INFO - eval mIoU of Guitar 0.907540
+2021-03-27 09:33:43,655 - Model - INFO - eval mIoU of Knife 0.873534
+2021-03-27 09:33:43,655 - Model - INFO - eval mIoU of Lamp 0.844467
+2021-03-27 09:33:43,655 - Model - INFO - eval mIoU of Laptop 0.952759
+2021-03-27 09:33:43,655 - Model - INFO - eval mIoU of Motorbike 0.719698
+2021-03-27 09:33:43,655 - Model - INFO - eval mIoU of Mug 0.943373
+2021-03-27 09:33:43,655 - Model - INFO - eval mIoU of Pistol 0.798555
+2021-03-27 09:33:43,655 - Model - INFO - eval mIoU of Rocket 0.603458
+2021-03-27 09:33:43,655 - Model - INFO - eval mIoU of Skateboard 0.759148
+2021-03-27 09:33:43,655 - Model - INFO - eval mIoU of Table 0.822307
+2021-03-27 09:33:43,655 - Model - INFO - Epoch 97 test Accuracy: 0.942379 Class avg mIOU: 0.813613 Inctance avg mIOU: 0.848279
+2021-03-27 09:33:43,656 - Model - INFO - Best accuracy is: 0.94347
+2021-03-27 09:33:43,656 - Model - INFO - Best class avg mIOU is: 0.82357
+2021-03-27 09:33:43,656 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 09:33:43,656 - Model - INFO - Epoch 98 (98/251):
+2021-03-27 09:33:43,656 - Model - INFO - Learning rate:0.000063
+2021-03-27 09:40:22,757 - Model - INFO - Train accuracy is: 0.95552
+2021-03-27 09:41:28,636 - Model - INFO - eval mIoU of Airplane 0.826248
+2021-03-27 09:41:28,636 - Model - INFO - eval mIoU of Bag 0.797118
+2021-03-27 09:41:28,636 - Model - INFO - eval mIoU of Cap 0.849881
+2021-03-27 09:41:28,636 - Model - INFO - eval mIoU of Car 0.773787
+2021-03-27 09:41:28,636 - Model - INFO - eval mIoU of Chair 0.906358
+2021-03-27 09:41:28,636 - Model - INFO - eval mIoU of Earphone 0.659574
+2021-03-27 09:41:28,636 - Model - INFO - eval mIoU of Guitar 0.908000
+2021-03-27 09:41:28,637 - Model - INFO - eval mIoU of Knife 0.873514
+2021-03-27 09:41:28,637 - Model - INFO - eval mIoU of Lamp 0.843776
+2021-03-27 09:41:28,637 - Model - INFO - eval mIoU of Laptop 0.952818
+2021-03-27 09:41:28,637 - Model - INFO - eval mIoU of Motorbike 0.720456
+2021-03-27 09:41:28,637 - Model - INFO - eval mIoU of Mug 0.938842
+2021-03-27 09:41:28,637 - Model - INFO - eval mIoU of Pistol 0.830365
+2021-03-27 09:41:28,637 - Model - INFO - eval mIoU of Rocket 0.610322
+2021-03-27 09:41:28,637 - Model - INFO - eval mIoU of Skateboard 0.758429
+2021-03-27 09:41:28,637 - Model - INFO - eval mIoU of Table 0.828669
+2021-03-27 09:41:28,637 - Model - INFO - Epoch 98 test Accuracy: 0.942921 Class avg mIOU: 0.817385 Inctance avg mIOU: 0.852117
+2021-03-27 09:41:28,637 - Model - INFO - Best accuracy is: 0.94347
+2021-03-27 09:41:28,637 - Model - INFO - Best class avg mIOU is: 0.82357
+2021-03-27 09:41:28,637 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 09:41:28,638 - Model - INFO - Epoch 99 (99/251):
+2021-03-27 09:41:28,638 - Model - INFO - Learning rate:0.000063
+2021-03-27 09:47:56,142 - Model - INFO - Train accuracy is: 0.95546
+2021-03-27 09:48:59,931 - Model - INFO - eval mIoU of Airplane 0.825220
+2021-03-27 09:48:59,932 - Model - INFO - eval mIoU of Bag 0.829068
+2021-03-27 09:48:59,932 - Model - INFO - eval mIoU of Cap 0.841080
+2021-03-27 09:48:59,932 - Model - INFO - eval mIoU of Car 0.777416
+2021-03-27 09:48:59,932 - Model - INFO - eval mIoU of Chair 0.904740
+2021-03-27 09:48:59,932 - Model - INFO - eval mIoU of Earphone 0.723201
+2021-03-27 09:48:59,932 - Model - INFO - eval mIoU of Guitar 0.908147
+2021-03-27 09:48:59,932 - Model - INFO - eval mIoU of Knife 0.866521
+2021-03-27 09:48:59,932 - Model - INFO - eval mIoU of Lamp 0.843354
+2021-03-27 09:48:59,932 - Model - INFO - eval mIoU of Laptop 0.953390
+2021-03-27 09:48:59,932 - Model - INFO - eval mIoU of Motorbike 0.712863
+2021-03-27 09:48:59,932 - Model - INFO - eval mIoU of Mug 0.951730
+2021-03-27 09:48:59,932 - Model - INFO - eval mIoU of Pistol 0.819828
+2021-03-27 09:48:59,932 - Model - INFO - eval mIoU of Rocket 0.596072
+2021-03-27 09:48:59,932 - Model - INFO - eval mIoU of Skateboard 0.760402
+2021-03-27 09:48:59,932 - Model - INFO - eval mIoU of Table 0.817335
+2021-03-27 09:48:59,933 - Model - INFO - Epoch 99 test Accuracy: 0.942208 Class avg mIOU: 0.820648 Inctance avg mIOU: 0.848511
+2021-03-27 09:48:59,933 - Model - INFO - Best accuracy is: 0.94347
+2021-03-27 09:48:59,933 - Model - INFO - Best class avg mIOU is: 0.82357
+2021-03-27 09:48:59,933 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 09:48:59,933 - Model - INFO - Epoch 100 (100/251):
+2021-03-27 09:48:59,933 - Model - INFO - Learning rate:0.000063
+2021-03-27 09:55:23,065 - Model - INFO - Train accuracy is: 0.95536
+2021-03-27 09:56:28,029 - Model - INFO - eval mIoU of Airplane 0.824302
+2021-03-27 09:56:28,029 - Model - INFO - eval mIoU of Bag 0.825866
+2021-03-27 09:56:28,029 - Model - INFO - eval mIoU of Cap 0.856732
+2021-03-27 09:56:28,029 - Model - INFO - eval mIoU of Car 0.771521
+2021-03-27 09:56:28,029 - Model - INFO - eval mIoU of Chair 0.904848
+2021-03-27 09:56:28,029 - Model - INFO - eval mIoU of Earphone 0.695765
+2021-03-27 09:56:28,029 - Model - INFO - eval mIoU of Guitar 0.908417
+2021-03-27 09:56:28,029 - Model - INFO - eval mIoU of Knife 0.861132
+2021-03-27 09:56:28,030 - Model - INFO - eval mIoU of Lamp 0.841733
+2021-03-27 09:56:28,030 - Model - INFO - eval mIoU of Laptop 0.954698
+2021-03-27 09:56:28,030 - Model - INFO - eval mIoU of Motorbike 0.720050
+2021-03-27 09:56:28,030 - Model - INFO - eval mIoU of Mug 0.940948
+2021-03-27 09:56:28,030 - Model - INFO - eval mIoU of Pistol 0.814430
+2021-03-27 09:56:28,030 - Model - INFO - eval mIoU of Rocket 0.591661
+2021-03-27 09:56:28,030 - Model - INFO - eval mIoU of Skateboard 0.761306
+2021-03-27 09:56:28,030 - Model - INFO - eval mIoU of Table 0.822767
+2021-03-27 09:56:28,030 - Model - INFO - Epoch 100 test Accuracy: 0.942488 Class avg mIOU: 0.818511 Inctance avg mIOU: 0.849253
+2021-03-27 09:56:28,030 - Model - INFO - Best accuracy is: 0.94347
+2021-03-27 09:56:28,031 - Model - INFO - Best class avg mIOU is: 0.82357
+2021-03-27 09:56:28,031 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 09:56:28,031 - Model - INFO - Epoch 101 (101/251):
+2021-03-27 09:56:28,031 - Model - INFO - Learning rate:0.000031
+2021-03-27 10:03:04,721 - Model - INFO - Train accuracy is: 0.95588
+2021-03-27 10:04:09,981 - Model - INFO - eval mIoU of Airplane 0.826012
+2021-03-27 10:04:09,982 - Model - INFO - eval mIoU of Bag 0.824327
+2021-03-27 10:04:09,982 - Model - INFO - eval mIoU of Cap 0.855253
+2021-03-27 10:04:09,982 - Model - INFO - eval mIoU of Car 0.774805
+2021-03-27 10:04:09,982 - Model - INFO - eval mIoU of Chair 0.905008
+2021-03-27 10:04:09,982 - Model - INFO - eval mIoU of Earphone 0.688811
+2021-03-27 10:04:09,982 - Model - INFO - eval mIoU of Guitar 0.907266
+2021-03-27 10:04:09,982 - Model - INFO - eval mIoU of Knife 0.867374
+2021-03-27 10:04:09,982 - Model - INFO - eval mIoU of Lamp 0.849578
+2021-03-27 10:04:09,982 - Model - INFO - eval mIoU of Laptop 0.953213
+2021-03-27 10:04:09,982 - Model - INFO - eval mIoU of Motorbike 0.726796
+2021-03-27 10:04:09,982 - Model - INFO - eval mIoU of Mug 0.948354
+2021-03-27 10:04:09,982 - Model - INFO - eval mIoU of Pistol 0.815744
+2021-03-27 10:04:09,982 - Model - INFO - eval mIoU of Rocket 0.608375
+2021-03-27 10:04:09,982 - Model - INFO - eval mIoU of Skateboard 0.760429
+2021-03-27 10:04:09,982 - Model - INFO - eval mIoU of Table 0.825747
+2021-03-27 10:04:09,982 - Model - INFO - Epoch 101 test Accuracy: 0.943821 Class avg mIOU: 0.821068 Inctance avg mIOU: 0.851653
+2021-03-27 10:04:09,983 - Model - INFO - Best accuracy is: 0.94382
+2021-03-27 10:04:09,983 - Model - INFO - Best class avg mIOU is: 0.82357
+2021-03-27 10:04:09,983 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 10:04:09,983 - Model - INFO - Epoch 102 (102/251):
+2021-03-27 10:04:09,983 - Model - INFO - Learning rate:0.000031
+2021-03-27 10:10:41,731 - Model - INFO - Train accuracy is: 0.95589
+2021-03-27 10:11:48,257 - Model - INFO - eval mIoU of Airplane 0.824157
+2021-03-27 10:11:48,258 - Model - INFO - eval mIoU of Bag 0.800720
+2021-03-27 10:11:48,258 - Model - INFO - eval mIoU of Cap 0.851515
+2021-03-27 10:11:48,258 - Model - INFO - eval mIoU of Car 0.779837
+2021-03-27 10:11:48,258 - Model - INFO - eval mIoU of Chair 0.908337
+2021-03-27 10:11:48,258 - Model - INFO - eval mIoU of Earphone 0.700364
+2021-03-27 10:11:48,258 - Model - INFO - eval mIoU of Guitar 0.911977
+2021-03-27 10:11:48,258 - Model - INFO - eval mIoU of Knife 0.875019
+2021-03-27 10:11:48,258 - Model - INFO - eval mIoU of Lamp 0.840814
+2021-03-27 10:11:48,258 - Model - INFO - eval mIoU of Laptop 0.954378
+2021-03-27 10:11:48,258 - Model - INFO - eval mIoU of Motorbike 0.723621
+2021-03-27 10:11:48,258 - Model - INFO - eval mIoU of Mug 0.953698
+2021-03-27 10:11:48,258 - Model - INFO - eval mIoU of Pistol 0.812563
+2021-03-27 10:11:48,258 - Model - INFO - eval mIoU of Rocket 0.599946
+2021-03-27 10:11:48,258 - Model - INFO - eval mIoU of Skateboard 0.764851
+2021-03-27 10:11:48,258 - Model - INFO - eval mIoU of Table 0.824961
+2021-03-27 10:11:48,259 - Model - INFO - Epoch 102 test Accuracy: 0.942957 Class avg mIOU: 0.820422 Inctance avg mIOU: 0.851834
+2021-03-27 10:11:48,259 - Model - INFO - Best accuracy is: 0.94382
+2021-03-27 10:11:48,259 - Model - INFO - Best class avg mIOU is: 0.82357
+2021-03-27 10:11:48,259 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 10:11:48,259 - Model - INFO - Epoch 103 (103/251):
+2021-03-27 10:11:48,259 - Model - INFO - Learning rate:0.000031
+2021-03-27 10:18:18,050 - Model - INFO - Train accuracy is: 0.95630
+2021-03-27 10:19:24,568 - Model - INFO - eval mIoU of Airplane 0.822597
+2021-03-27 10:19:24,569 - Model - INFO - eval mIoU of Bag 0.814869
+2021-03-27 10:19:24,569 - Model - INFO - eval mIoU of Cap 0.850399
+2021-03-27 10:19:24,569 - Model - INFO - eval mIoU of Car 0.777575
+2021-03-27 10:19:24,569 - Model - INFO - eval mIoU of Chair 0.904002
+2021-03-27 10:19:24,569 - Model - INFO - eval mIoU of Earphone 0.702001
+2021-03-27 10:19:24,569 - Model - INFO - eval mIoU of Guitar 0.910488
+2021-03-27 10:19:24,569 - Model - INFO - eval mIoU of Knife 0.867567
+2021-03-27 10:19:24,569 - Model - INFO - eval mIoU of Lamp 0.846254
+2021-03-27 10:19:24,569 - Model - INFO - eval mIoU of Laptop 0.954049
+2021-03-27 10:19:24,569 - Model - INFO - eval mIoU of Motorbike 0.731260
+2021-03-27 10:19:24,569 - Model - INFO - eval mIoU of Mug 0.946086
+2021-03-27 10:19:24,569 - Model - INFO - eval mIoU of Pistol 0.825348
+2021-03-27 10:19:24,570 - Model - INFO - eval mIoU of Rocket 0.612306
+2021-03-27 10:19:24,570 - Model - INFO - eval mIoU of Skateboard 0.761618
+2021-03-27 10:19:24,570 - Model - INFO - eval mIoU of Table 0.825705
+2021-03-27 10:19:24,570 - Model - INFO - Epoch 103 test Accuracy: 0.943157 Class avg mIOU: 0.822008 Inctance avg mIOU: 0.851244
+2021-03-27 10:19:24,570 - Model - INFO - Best accuracy is: 0.94382
+2021-03-27 10:19:24,570 - Model - INFO - Best class avg mIOU is: 0.82357
+2021-03-27 10:19:24,570 - Model - INFO - Best inctance avg mIOU is: 0.85278
+2021-03-27 10:19:24,570 - Model - INFO - Epoch 104 (104/251):
+2021-03-27 10:19:24,571 - Model - INFO - Learning rate:0.000031
+2021-03-27 10:26:00,887 - Model - INFO - Train accuracy is: 0.95609
+2021-03-27 10:27:06,654 - Model - INFO - eval mIoU of Airplane 0.823162
+2021-03-27 10:27:06,654 - Model - INFO - eval mIoU of Bag 0.800965
+2021-03-27 10:27:06,654 - Model - INFO - eval mIoU of Cap 0.855990
+2021-03-27 10:27:06,654 - Model - INFO - eval mIoU of Car 0.785224
+2021-03-27 10:27:06,654 - Model - INFO - eval mIoU of Chair 0.906122
+2021-03-27 10:27:06,654 - Model - INFO - eval mIoU of Earphone 0.695857
+2021-03-27 10:27:06,654 - Model - INFO - eval mIoU of Guitar 0.909063
+2021-03-27 10:27:06,654 - Model - INFO - eval mIoU of Knife 0.868822
+2021-03-27 10:27:06,655 - Model - INFO - eval mIoU of Lamp 0.848163
+2021-03-27 10:27:06,655 - Model - INFO - eval mIoU of Laptop 0.955621
+2021-03-27 10:27:06,655 - Model - INFO - eval mIoU of Motorbike 0.718112
+2021-03-27 10:27:06,655 - Model - INFO - eval mIoU of Mug 0.952934
+2021-03-27 10:27:06,655 - Model - INFO - eval mIoU of Pistol 0.823164
+2021-03-27 10:27:06,655 - Model - INFO - eval mIoU of Rocket 0.595510
+2021-03-27 10:27:06,655 - Model - INFO - eval mIoU of Skateboard 0.762034
+2021-03-27 10:27:06,655 - Model - INFO - eval mIoU of Table 0.828051
+2021-03-27 10:27:06,655 - Model - INFO - Epoch 104 test Accuracy: 0.942932 Class avg mIOU: 0.820550 Inctance avg mIOU: 0.852816
+2021-03-27 10:27:06,655 - Model - INFO - Save model...
+2021-03-27 10:27:06,655 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-27 10:27:06,729 - Model - INFO - Saving model....
+2021-03-27 10:27:06,729 - Model - INFO - Best accuracy is: 0.94382
+2021-03-27 10:27:06,729 - Model - INFO - Best class avg mIOU is: 0.82357
+2021-03-27 10:27:06,730 - Model - INFO - Best inctance avg mIOU is: 0.85282
+2021-03-27 10:27:06,730 - Model - INFO - Epoch 105 (105/251):
+2021-03-27 10:27:06,730 - Model - INFO - Learning rate:0.000031
+2021-03-27 10:33:35,235 - Model - INFO - Train accuracy is: 0.95600
+2021-03-27 10:34:39,170 - Model - INFO - eval mIoU of Airplane 0.829394
+2021-03-27 10:34:39,170 - Model - INFO - eval mIoU of Bag 0.824200
+2021-03-27 10:34:39,170 - Model - INFO - eval mIoU of Cap 0.850190
+2021-03-27 10:34:39,170 - Model - INFO - eval mIoU of Car 0.780185
+2021-03-27 10:34:39,170 - Model - INFO - eval mIoU of Chair 0.902932
+2021-03-27 10:34:39,170 - Model - INFO - eval mIoU of Earphone 0.717672
+2021-03-27 10:34:39,170 - Model - INFO - eval mIoU of Guitar 0.911124
+2021-03-27 10:34:39,170 - Model - INFO - eval mIoU of Knife 0.875801
+2021-03-27 10:34:39,170 - Model - INFO - eval mIoU of Lamp 0.844416
+2021-03-27 10:34:39,170 - Model - INFO - eval mIoU of Laptop 0.954955
+2021-03-27 10:34:39,170 - Model - INFO - eval mIoU of Motorbike 0.720151
+2021-03-27 10:34:39,171 - Model - INFO - eval mIoU of Mug 0.949274
+2021-03-27 10:34:39,171 - Model - INFO - eval mIoU of Pistol 0.814151
+2021-03-27 10:34:39,171 - Model - INFO - eval mIoU of Rocket 0.620889
+2021-03-27 10:34:39,171 - Model - INFO - eval mIoU of Skateboard 0.760082
+2021-03-27 10:34:39,171 - Model - INFO - eval mIoU of Table 0.828382
+2021-03-27 10:34:39,171 - Model - INFO - Epoch 105 test Accuracy: 0.943879 Class avg mIOU: 0.823987 Inctance avg mIOU: 0.852643
+2021-03-27 10:34:39,171 - Model - INFO - Best accuracy is: 0.94388
+2021-03-27 10:34:39,171 - Model - INFO - Best class avg mIOU is: 0.82399
+2021-03-27 10:34:39,171 - Model - INFO - Best inctance avg mIOU is: 0.85282
+2021-03-27 10:34:39,171 - Model - INFO - Epoch 106 (106/251):
+2021-03-27 10:34:39,172 - Model - INFO - Learning rate:0.000031
+2021-03-27 10:41:13,990 - Model - INFO - Train accuracy is: 0.95613
+2021-03-27 10:42:20,067 - Model - INFO - eval mIoU of Airplane 0.827407
+2021-03-27 10:42:20,068 - Model - INFO - eval mIoU of Bag 0.832832
+2021-03-27 10:42:20,068 - Model - INFO - eval mIoU of Cap 0.863044
+2021-03-27 10:42:20,068 - Model - INFO - eval mIoU of Car 0.775336
+2021-03-27 10:42:20,068 - Model - INFO - eval mIoU of Chair 0.904358
+2021-03-27 10:42:20,068 - Model - INFO - eval mIoU of Earphone 0.734064
+2021-03-27 10:42:20,068 - Model - INFO - eval mIoU of Guitar 0.909029
+2021-03-27 10:42:20,068 - Model - INFO - eval mIoU of Knife 0.878734
+2021-03-27 10:42:20,068 - Model - INFO - eval mIoU of Lamp 0.844127
+2021-03-27 10:42:20,069 - Model - INFO - eval mIoU of Laptop 0.954575
+2021-03-27 10:42:20,069 - Model - INFO - eval mIoU of Motorbike 0.730599
+2021-03-27 10:42:20,069 - Model - INFO - eval mIoU of Mug 0.951984
+2021-03-27 10:42:20,069 - Model - INFO - eval mIoU of Pistol 0.815126
+2021-03-27 10:42:20,069 - Model - INFO - eval mIoU of Rocket 0.600459
+2021-03-27 10:42:20,069 - Model - INFO - eval mIoU of Skateboard 0.762866
+2021-03-27 10:42:20,069 - Model - INFO - eval mIoU of Table 0.832732
+2021-03-27 10:42:20,069 - Model - INFO - Epoch 106 test Accuracy: 0.943322 Class avg mIOU: 0.826080 Inctance avg mIOU: 0.854052
+2021-03-27 10:42:20,069 - Model - INFO - Save model...
+2021-03-27 10:42:20,069 - Model - INFO - Saving at log/part_seg/pointnet2_part_seg_msg/checkpoints/best_model.pth
+2021-03-27 10:42:20,187 - Model - INFO - Saving model....
+2021-03-27 10:42:20,187 - Model - INFO - Best accuracy is: 0.94388
+2021-03-27 10:42:20,187 - Model - INFO - Best class avg mIOU is: 0.82608
+2021-03-27 10:42:20,187 - Model - INFO - Best inctance avg mIOU is: 0.85405
+2021-03-27 10:42:20,188 - Model - INFO - Epoch 107 (107/251):
+2021-03-27 10:42:20,188 - Model - INFO - Learning rate:0.000031
+2021-03-27 10:48:56,194 - Model - INFO - Train accuracy is: 0.95616
+2021-03-27 10:50:00,970 - Model - INFO - eval mIoU of Airplane 0.827786
+2021-03-27 10:50:00,971 - Model - INFO - eval mIoU of Bag 0.834039
+2021-03-27 10:50:00,971 - Model - INFO - eval mIoU of Cap 0.855453
+2021-03-27 10:50:00,971 - Model - INFO - eval mIoU of Car 0.779957
+2021-03-27 10:50:00,971 - Model - INFO - eval mIoU of Chair 0.905642
+2021-03-27 10:50:00,971 - Model - INFO - eval mIoU of Earphone 0.698059
+2021-03-27 10:50:00,971 - Model - INFO - eval mIoU of Guitar 0.913117
+2021-03-27 10:50:00,971 - Model - INFO - eval mIoU of Knife 0.868074
+2021-03-27 10:50:00,971 - Model - INFO - eval mIoU of Lamp 0.843700
+2021-03-27 10:50:00,972 - Model - INFO - eval mIoU of Laptop 0.955951
+2021-03-27 10:50:00,972 - Model - INFO - eval mIoU of Motorbike 0.733807
+2021-03-27 10:50:00,972 - Model - INFO - eval mIoU of Mug 0.951518
+2021-03-27 10:50:00,972 - Model - INFO - eval mIoU of Pistol 0.808586
+2021-03-27 10:50:00,972 - Model - INFO - eval mIoU of Rocket 0.612057
+2021-03-27 10:50:00,972 - Model - INFO - eval mIoU of Skateboard 0.759218
+2021-03-27 10:50:00,972 - Model - INFO - eval mIoU of Table 0.824970
+2021-03-27 10:50:00,972 - Model - INFO - Epoch 107 test Accuracy: 0.942826 Class avg mIOU: 0.823246 Inctance avg mIOU: 0.852062
+2021-03-27 10:50:00,973 - Model - INFO - Best accuracy is: 0.94388
+2021-03-27 10:50:00,973 - Model - INFO - Best class avg mIOU is: 0.82608
+2021-03-27 10:50:00,973 - Model - INFO - Best inctance avg mIOU is: 0.85405
+2021-03-27 10:50:00,973 - Model - INFO - Epoch 108 (108/251):
+2021-03-27 10:50:00,973 - Model - INFO - Learning rate:0.000031
+2021-03-27 10:56:28,958 - Model - INFO - Train accuracy is: 0.95624
+2021-03-27 10:57:33,472 - Model - INFO - eval mIoU of Airplane 0.828749
+2021-03-27 10:57:33,472 - Model - INFO - eval mIoU of Bag 0.813954
+2021-03-27 10:57:33,472 - Model - INFO - eval mIoU of Cap 0.849883
+2021-03-27 10:57:33,472 - Model - INFO - eval mIoU of Car 0.775266
+2021-03-27 10:57:33,472 - Model - INFO - eval mIoU of Chair 0.906076
+2021-03-27 10:57:33,472 - Model - INFO - eval mIoU of Earphone 0.708212
+2021-03-27 10:57:33,472 - Model - INFO - eval mIoU of Guitar 0.911677
+2021-03-27 10:57:33,472 - Model - INFO - eval mIoU of Knife 0.863633
+2021-03-27 10:57:33,473 - Model - INFO - eval mIoU of Lamp 0.845272
+2021-03-27 10:57:33,473 - Model - INFO - eval mIoU of Laptop 0.955609
+2021-03-27 10:57:33,473 - Model - INFO - eval mIoU of Motorbike 0.726924
+2021-03-27 10:57:33,473 - Model - INFO - eval mIoU of Mug 0.951129
+2021-03-27 10:57:33,473 - Model - INFO - eval mIoU of Pistol 0.817873
+2021-03-27 10:57:33,473 - Model - INFO - eval mIoU of Rocket 0.620992
+2021-03-27 10:57:33,473 - Model - INFO - eval mIoU of Skateboard 0.761999
+2021-03-27 10:57:33,473 - Model - INFO - eval mIoU of Table 0.826389
+2021-03-27 10:57:33,473 - Model - INFO - Epoch 108 test Accuracy: 0.943230 Class avg mIOU: 0.822727 Inctance avg mIOU: 0.852400
+2021-03-27 10:57:33,473 - Model - INFO - Best accuracy is: 0.94388
+2021-03-27 10:57:33,473 - Model - INFO - Best class avg mIOU is: 0.82608
+2021-03-27 10:57:33,473 - Model - INFO - Best inctance avg mIOU is: 0.85405
+2021-03-27 10:57:33,474 - Model - INFO - Epoch 109 (109/251):
+2021-03-27 10:57:33,474 - Model - INFO - Learning rate:0.000031
+2021-03-27 11:04:06,550 - Model - INFO - Train accuracy is: 0.95633
+2021-03-27 11:05:14,626 - Model - INFO - eval mIoU of Airplane 0.828025
+2021-03-27 11:05:14,627 - Model - INFO - eval mIoU of Bag 0.818662
+2021-03-27 11:05:14,627 - Model - INFO - eval mIoU of Cap 0.855456
+2021-03-27 11:05:14,627 - Model - INFO - eval mIoU of Car 0.779414
+2021-03-27 11:05:14,627 - Model - INFO - eval mIoU of Chair 0.905986
+2021-03-27 11:05:14,627 - Model - INFO - eval mIoU of Earphone 0.694806
+2021-03-27 11:05:14,627 - Model - INFO - eval mIoU of Guitar 0.911982
+2021-03-27 11:05:14,627 - Model - INFO - eval mIoU of Knife 0.869707
+2021-03-27 11:05:14,627 - Model - INFO - eval mIoU of Lamp 0.843582
+2021-03-27 11:05:14,627 - Model - INFO - eval mIoU of Laptop 0.954939
+2021-03-27 11:05:14,627 - Model - INFO - eval mIoU of Motorbike 0.723228
+2021-03-27 11:05:14,627 - Model - INFO - eval mIoU of Mug 0.948590
+2021-03-27 11:05:14,627 - Model - INFO - eval mIoU of Pistol 0.813100
+2021-03-27 11:05:14,627 - Model - INFO - eval mIoU of Rocket 0.613642
+2021-03-27 11:05:14,627 - Model - INFO - eval mIoU of Skateboard 0.759146
+2021-03-27 11:05:14,628 - Model - INFO - eval mIoU of Table 0.827292
+2021-03-27 11:05:14,628 - Model - INFO - Epoch 109 test Accuracy: 0.943171 Class avg mIOU: 0.821722 Inctance avg mIOU: 0.852530
+2021-03-27 11:05:14,628 - Model - INFO - Best accuracy is: 0.94388
+2021-03-27 11:05:14,628 - Model - INFO - Best class avg mIOU is: 0.82608
+2021-03-27 11:05:14,629 - Model - INFO - Best inctance avg mIOU is: 0.85405
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/part_seg/pointnet2_part_seg_msg/pointnet2_part_seg_msg.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/part_seg/pointnet2_part_seg_msg/pointnet2_part_seg_msg.py
new file mode 100644
index 0000000000..de11b4e846
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/part_seg/pointnet2_part_seg_msg/pointnet2_part_seg_msg.py
@@ -0,0 +1,59 @@
+import torch.nn as nn
+import torch
+import torch.nn.functional as F
+from models.pointnet2_utils import PointNetSetAbstractionMsg,PointNetSetAbstraction,PointNetFeaturePropagation
+
+
+class get_model(nn.Module):
+ def __init__(self, num_classes, normal_channel=False):
+ super(get_model, self).__init__()
+ if normal_channel:
+ additional_channel = 3
+ else:
+ additional_channel = 0
+ self.normal_channel = normal_channel
+ self.sa1 = PointNetSetAbstractionMsg(512, [0.1, 0.2, 0.4], [32, 64, 128], 3+additional_channel, [[32, 32, 64], [64, 64, 128], [64, 96, 128]])
+ self.sa2 = PointNetSetAbstractionMsg(128, [0.4,0.8], [64, 128], 128+128+64, [[128, 128, 256], [128, 196, 256]])
+ self.sa3 = PointNetSetAbstraction(npoint=None, radius=None, nsample=None, in_channel=512 + 3, mlp=[256, 512, 1024], group_all=True)
+ self.fp3 = PointNetFeaturePropagation(in_channel=1536, mlp=[256, 256])
+ self.fp2 = PointNetFeaturePropagation(in_channel=576, mlp=[256, 128])
+ self.fp1 = PointNetFeaturePropagation(in_channel=150+additional_channel, mlp=[128, 128])
+ self.conv1 = nn.Conv1d(128, 128, 1)
+ self.bn1 = nn.BatchNorm1d(128)
+ self.drop1 = nn.Dropout(0.5)
+ self.conv2 = nn.Conv1d(128, num_classes, 1)
+
+ def forward(self, xyz, cls_label):
+ # Set Abstraction layers
+ B,C,N = xyz.shape
+ if self.normal_channel:
+ l0_points = xyz
+ l0_xyz = xyz[:,:3,:]
+ else:
+ l0_points = xyz
+ l0_xyz = xyz
+ l1_xyz, l1_points = self.sa1(l0_xyz, l0_points)
+ l2_xyz, l2_points = self.sa2(l1_xyz, l1_points)
+ l3_xyz, l3_points = self.sa3(l2_xyz, l2_points)
+ # Feature Propagation layers
+ l2_points = self.fp3(l2_xyz, l3_xyz, l2_points, l3_points)
+ l1_points = self.fp2(l1_xyz, l2_xyz, l1_points, l2_points)
+ cls_label_one_hot = cls_label.view(B,16,1).repeat(1,1,N)
+ l0_points = self.fp1(l0_xyz, l1_xyz, torch.cat([cls_label_one_hot,l0_xyz,l0_points],1), l1_points)
+ # FC layers
+ feat = F.relu(self.bn1(self.conv1(l0_points)), inplace=True)
+ x = self.drop1(feat)
+ x = self.conv2(x)
+ x = F.log_softmax(x, dim=1)
+ x = x.permute(0, 2, 1)
+ return x, l3_points
+
+
+class get_loss(nn.Module):
+ def __init__(self):
+ super(get_loss, self).__init__()
+
+ def forward(self, pred, target, trans_feat):
+ total_loss = F.nll_loss(pred, target)
+
+ return total_loss
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/part_seg/pointnet2_part_seg_msg/pointnet2_utils.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/part_seg/pointnet2_part_seg_msg/pointnet2_utils.py
new file mode 100644
index 0000000000..4ef8763727
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/part_seg/pointnet2_part_seg_msg/pointnet2_utils.py
@@ -0,0 +1,316 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from time import time
+import numpy as np
+
+def timeit(tag, t):
+ print("{}: {}s".format(tag, time() - t))
+ return time()
+
+def pc_normalize(pc):
+ l = pc.shape[0]
+ centroid = np.mean(pc, axis=0)
+ pc = pc - centroid
+ m = np.max(np.sqrt(np.sum(pc**2, axis=1)))
+ pc = pc / m
+ return pc
+
+def square_distance(src, dst):
+ """
+ Calculate Euclid distance between each two points.
+
+ src^T * dst = xn * xm + yn * ym + zn * zmï¼›
+ sum(src^2, dim=-1) = xn*xn + yn*yn + zn*zn;
+ sum(dst^2, dim=-1) = xm*xm + ym*ym + zm*zm;
+ dist = (xn - xm)^2 + (yn - ym)^2 + (zn - zm)^2
+ = sum(src**2,dim=-1)+sum(dst**2,dim=-1)-2*src^T*dst
+
+ Input:
+ src: source points, [B, N, C]
+ dst: target points, [B, M, C]
+ Output:
+ dist: per-point square distance, [B, N, M]
+ """
+ B, N, _ = src.shape
+ _, M, _ = dst.shape
+ dist = -2 * torch.matmul(src, dst.permute(0, 2, 1))
+ dist += torch.sum(src ** 2, -1).view(B, N, 1)
+ dist += torch.sum(dst ** 2, -1).view(B, 1, M)
+ return dist
+
+
+def index_points(points, idx):
+ """
+
+ Input:
+ points: input points data, [B, N, C]
+ idx: sample index data, [B, S]
+ Return:
+ new_points:, indexed points data, [B, S, C]
+ """
+ device = points.device
+ B = points.shape[0]
+ view_shape = list(idx.shape)
+ view_shape[1:] = [1] * (len(view_shape) - 1)
+ repeat_shape = list(idx.shape)
+ repeat_shape[0] = 1
+ batch_indices = torch.arange(B, dtype=torch.long).to(device).view(view_shape).repeat(repeat_shape)
+ new_points = points[batch_indices, idx, :]
+ return new_points
+
+
+def farthest_point_sample(xyz, npoint):
+ """
+ Input:
+ xyz: pointcloud data, [B, N, 3]
+ npoint: number of samples
+ Return:
+ centroids: sampled pointcloud index, [B, npoint]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ centroids = torch.zeros(B, npoint, dtype=torch.long).to(device)
+ distance = torch.ones(B, N).to(device) * 1e10
+ farthest = torch.randint(0, N, (B,), dtype=torch.long).to(device)
+ batch_indices = torch.arange(B, dtype=torch.long).to(device)
+ for i in range(npoint):
+ centroids[:, i] = farthest
+ centroid = xyz[batch_indices, farthest, :].view(B, 1, 3)
+ dist = torch.sum((xyz - centroid) ** 2, -1)
+ mask = dist < distance
+ distance[mask] = dist[mask]
+ farthest = torch.max(distance, -1)[1]
+ return centroids
+
+
+def query_ball_point(radius, nsample, xyz, new_xyz):
+ """
+ Input:
+ radius: local region radius
+ nsample: max sample number in local region
+ xyz: all points, [B, N, 3]
+ new_xyz: query points, [B, S, 3]
+ Return:
+ group_idx: grouped points index, [B, S, nsample]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ _, S, _ = new_xyz.shape
+ group_idx = torch.arange(N, dtype=torch.long).to(device).view(1, 1, N).repeat([B, S, 1])
+ sqrdists = square_distance(new_xyz, xyz)
+ group_idx[sqrdists > radius ** 2] = N
+ group_idx = group_idx.sort(dim=-1)[0][:, :, :nsample]
+ group_first = group_idx[:, :, 0].view(B, S, 1).repeat([1, 1, nsample])
+ mask = group_idx == N
+ group_idx[mask] = group_first[mask]
+ return group_idx
+
+
+def sample_and_group(npoint, radius, nsample, xyz, points, returnfps=False):
+ """
+ Input:
+ npoint:
+ radius:
+ nsample:
+ xyz: input points position data, [B, N, 3]
+ points: input points data, [B, N, D]
+ Return:
+ new_xyz: sampled points position data, [B, npoint, nsample, 3]
+ new_points: sampled points data, [B, npoint, nsample, 3+D]
+ """
+ B, N, C = xyz.shape
+ S = npoint
+ fps_idx = farthest_point_sample(xyz, npoint) # [B, npoint, C]
+ new_xyz = index_points(xyz, fps_idx)
+ idx = query_ball_point(radius, nsample, xyz, new_xyz)
+ grouped_xyz = index_points(xyz, idx) # [B, npoint, nsample, C]
+ grouped_xyz_norm = grouped_xyz - new_xyz.view(B, S, 1, C)
+
+ if points is not None:
+ grouped_points = index_points(points, idx)
+ new_points = torch.cat([grouped_xyz_norm, grouped_points], dim=-1) # [B, npoint, nsample, C+D]
+ else:
+ new_points = grouped_xyz_norm
+ if returnfps:
+ return new_xyz, new_points, grouped_xyz, fps_idx
+ else:
+ return new_xyz, new_points
+
+
+def sample_and_group_all(xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, N, 3]
+ points: input points data, [B, N, D]
+ Return:
+ new_xyz: sampled points position data, [B, 1, 3]
+ new_points: sampled points data, [B, 1, N, 3+D]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ new_xyz = torch.zeros(B, 1, C).to(device)
+ grouped_xyz = xyz.view(B, 1, N, C)
+ if points is not None:
+ new_points = torch.cat([grouped_xyz, points.view(B, 1, N, -1)], dim=-1)
+ else:
+ new_points = grouped_xyz
+ return new_xyz, new_points
+
+
+class PointNetSetAbstraction(nn.Module):
+ def __init__(self, npoint, radius, nsample, in_channel, mlp, group_all):
+ super(PointNetSetAbstraction, self).__init__()
+ self.npoint = npoint
+ self.radius = radius
+ self.nsample = nsample
+ self.mlp_convs = nn.ModuleList()
+ self.mlp_bns = nn.ModuleList()
+ last_channel = in_channel
+ for out_channel in mlp:
+ self.mlp_convs.append(nn.Conv2d(last_channel, out_channel, 1))
+ self.mlp_bns.append(nn.BatchNorm2d(out_channel))
+ last_channel = out_channel
+ self.group_all = group_all
+
+ def forward(self, xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, C, N]
+ points: input points data, [B, D, N]
+ Return:
+ new_xyz: sampled points position data, [B, C, S]
+ new_points_concat: sample points feature data, [B, D', S]
+ """
+ xyz = xyz.permute(0, 2, 1)
+ if points is not None:
+ points = points.permute(0, 2, 1)
+
+ if self.group_all:
+ new_xyz, new_points = sample_and_group_all(xyz, points)
+ else:
+ new_xyz, new_points = sample_and_group(self.npoint, self.radius, self.nsample, xyz, points)
+ # new_xyz: sampled points position data, [B, npoint, C]
+ # new_points: sampled points data, [B, npoint, nsample, C+D]
+ new_points = new_points.permute(0, 3, 2, 1) # [B, C+D, nsample,npoint]
+ for i, conv in enumerate(self.mlp_convs):
+ bn = self.mlp_bns[i]
+ new_points = F.relu(bn(conv(new_points)), inplace=True)
+
+ new_points = torch.max(new_points, 2)[0]
+ new_xyz = new_xyz.permute(0, 2, 1)
+ return new_xyz, new_points
+
+
+class PointNetSetAbstractionMsg(nn.Module):
+ def __init__(self, npoint, radius_list, nsample_list, in_channel, mlp_list):
+ super(PointNetSetAbstractionMsg, self).__init__()
+ self.npoint = npoint
+ self.radius_list = radius_list
+ self.nsample_list = nsample_list
+ self.conv_blocks = nn.ModuleList()
+ self.bn_blocks = nn.ModuleList()
+ for i in range(len(mlp_list)):
+ convs = nn.ModuleList()
+ bns = nn.ModuleList()
+ last_channel = in_channel + 3
+ for out_channel in mlp_list[i]:
+ convs.append(nn.Conv2d(last_channel, out_channel, 1))
+ bns.append(nn.BatchNorm2d(out_channel))
+ last_channel = out_channel
+ self.conv_blocks.append(convs)
+ self.bn_blocks.append(bns)
+
+ def forward(self, xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, C, N]
+ points: input points data, [B, D, N]
+ Return:
+ new_xyz: sampled points position data, [B, C, S]
+ new_points_concat: sample points feature data, [B, D', S]
+ """
+ xyz = xyz.permute(0, 2, 1)
+ if points is not None:
+ points = points.permute(0, 2, 1)
+
+ B, N, C = xyz.shape
+ S = self.npoint
+ new_xyz = index_points(xyz, farthest_point_sample(xyz, S))
+ new_points_list = []
+ for i, radius in enumerate(self.radius_list):
+ K = self.nsample_list[i]
+ group_idx = query_ball_point(radius, K, xyz, new_xyz)
+ grouped_xyz = index_points(xyz, group_idx)
+ grouped_xyz -= new_xyz.view(B, S, 1, C)
+ if points is not None:
+ grouped_points = index_points(points, group_idx)
+ grouped_points = torch.cat([grouped_points, grouped_xyz], dim=-1)
+ else:
+ grouped_points = grouped_xyz
+
+ grouped_points = grouped_points.permute(0, 3, 2, 1) # [B, D, K, S]
+ for j in range(len(self.conv_blocks[i])):
+ conv = self.conv_blocks[i][j]
+ bn = self.bn_blocks[i][j]
+ grouped_points = F.relu(bn(conv(grouped_points)), inplace=True)
+ new_points = torch.max(grouped_points, 2)[0] # [B, D', S]
+ new_points_list.append(new_points)
+
+ new_xyz = new_xyz.permute(0, 2, 1)
+ new_points_concat = torch.cat(new_points_list, dim=1)
+ return new_xyz, new_points_concat
+
+
+class PointNetFeaturePropagation(nn.Module):
+ def __init__(self, in_channel, mlp):
+ super(PointNetFeaturePropagation, self).__init__()
+ self.mlp_convs = nn.ModuleList()
+ self.mlp_bns = nn.ModuleList()
+ last_channel = in_channel
+ for out_channel in mlp:
+ self.mlp_convs.append(nn.Conv1d(last_channel, out_channel, 1))
+ self.mlp_bns.append(nn.BatchNorm1d(out_channel))
+ last_channel = out_channel
+
+ def forward(self, xyz1, xyz2, points1, points2):
+ """
+ Input:
+ xyz1: input points position data, [B, C, N]
+ xyz2: sampled input points position data, [B, C, S]
+ points1: input points data, [B, D, N]
+ points2: input points data, [B, D, S]
+ Return:
+ new_points: upsampled points data, [B, D', N]
+ """
+ xyz1 = xyz1.permute(0, 2, 1)
+ xyz2 = xyz2.permute(0, 2, 1)
+
+ points2 = points2.permute(0, 2, 1)
+ B, N, C = xyz1.shape
+ _, S, _ = xyz2.shape
+
+ if S == 1:
+ interpolated_points = points2.repeat(1, N, 1)
+ else:
+ dists = square_distance(xyz1, xyz2)
+ dists, idx = dists.sort(dim=-1)
+ dists, idx = dists[:, :, :3], idx[:, :, :3] # [B, N, 3]
+
+ dist_recip = 1.0 / (dists + 1e-8)
+ norm = torch.sum(dist_recip, dim=2, keepdim=True)
+ weight = dist_recip / norm
+ interpolated_points = torch.sum(index_points(points2, idx) * weight.view(B, N, 3, 1), dim=2)
+
+ if points1 is not None:
+ points1 = points1.permute(0, 2, 1)
+ new_points = torch.cat([points1, interpolated_points], dim=-1)
+ else:
+ new_points = interpolated_points
+
+ new_points = new_points.permute(0, 2, 1)
+ for i, conv in enumerate(self.mlp_convs):
+ bn = self.mlp_bns[i]
+ new_points = F.relu(bn(conv(new_points)), inplace=True)
+ return new_points
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet2_sem_seg/checkpoints/best_model.pth b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet2_sem_seg/checkpoints/best_model.pth
new file mode 100644
index 0000000000..6fdfdee087
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet2_sem_seg/checkpoints/best_model.pth differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet2_sem_seg/eval.txt b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet2_sem_seg/eval.txt
new file mode 100644
index 0000000000..0de06259ad
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet2_sem_seg/eval.txt
@@ -0,0 +1,90 @@
+2021-03-22 13:40:15,976 - Model - INFO - PARAMETER ...
+2021-03-22 13:40:15,977 - Model - INFO - Namespace(batch_size=32, gpu='2', log_dir='pointnet2_sem_seg', num_point=4096, num_votes=3, test_area=5, visual=False)
+2021-03-22 13:45:50,923 - Model - INFO - The number of test data is: 68
+2021-03-22 13:46:06,501 - Model - INFO - ---- EVALUATION WHOLE SCENE----
+2021-03-22 13:54:10,224 - Model - INFO - Mean IoU of Area_5_office_1: 0.5885
+2021-03-22 14:01:33,975 - Model - INFO - Mean IoU of Area_5_office_10: 0.6179
+2021-03-22 14:38:11,357 - Model - INFO - Mean IoU of Area_5_conferenceRoom_3: 0.6024
+2021-03-22 14:59:34,010 - Model - INFO - Mean IoU of Area_5_office_15: 0.5891
+2021-03-22 15:24:52,909 - Model - INFO - Mean IoU of Area_5_lobby_1: 0.7761
+2021-03-22 15:30:55,276 - Model - INFO - Mean IoU of Area_5_office_27: 0.6575
+2021-03-22 16:07:05,579 - Model - INFO - Mean IoU of Area_5_hallway_13: 0.6365
+2021-03-22 16:38:57,175 - Model - INFO - Mean IoU of Area_5_office_41: 0.6261
+2021-03-22 16:57:27,187 - Model - INFO - Mean IoU of Area_5_office_18: 0.6153
+2021-03-22 17:06:25,938 - Model - INFO - Mean IoU of Area_5_office_12: 0.5852
+2021-03-22 17:13:30,382 - Model - INFO - Mean IoU of Area_5_office_28: 0.6707
+2021-03-22 17:21:46,557 - Model - INFO - Mean IoU of Area_5_office_31: 0.6153
+2021-03-22 17:26:49,769 - Model - INFO - Mean IoU of Area_5_office_26: 0.5207
+2021-03-22 17:36:58,881 - Model - INFO - Mean IoU of Area_5_office_35: 0.7640
+2021-03-22 23:59:28,625 - Model - INFO - Mean IoU of Area_5_hallway_2: 0.7175
+2021-03-23 00:14:06,973 - Model - INFO - Mean IoU of Area_5_office_39: 0.6135
+2021-03-23 00:52:12,769 - Model - INFO - Mean IoU of Area_5_office_24: 0.5573
+2021-03-23 01:08:37,853 - Model - INFO - Mean IoU of Area_5_office_14: 0.6230
+2021-03-23 01:11:29,081 - Model - INFO - Mean IoU of Area_5_storage_4: 0.6659
+2021-03-23 01:28:36,731 - Model - INFO - Mean IoU of Area_5_hallway_6: 0.6781
+2021-03-23 02:06:28,342 - Model - INFO - Mean IoU of Area_5_office_19: 0.7059
+2021-03-23 02:14:30,956 - Model - INFO - Mean IoU of Area_5_office_42: 0.6049
+2021-03-23 03:50:57,178 - Model - INFO - Mean IoU of Area_5_office_40: 0.4828
+2021-03-23 03:57:10,565 - Model - INFO - Mean IoU of Area_5_office_4: 0.7075
+2021-03-23 04:04:08,882 - Model - INFO - Mean IoU of Area_5_office_32: 0.6113
+2021-03-23 05:12:29,614 - Model - INFO - Mean IoU of Area_5_office_37: 0.4237
+2021-03-23 05:25:57,774 - Model - INFO - Mean IoU of Area_5_office_36: 0.6645
+2021-03-23 05:30:57,288 - Model - INFO - Mean IoU of Area_5_office_6: 0.6999
+2021-03-23 05:34:10,814 - Model - INFO - Mean IoU of Area_5_hallway_14: 0.7079
+2021-03-23 06:04:14,884 - Model - INFO - Mean IoU of Area_5_office_29: 0.5924
+2021-03-23 06:09:53,963 - Model - INFO - Mean IoU of Area_5_office_3: 0.7578
+2021-03-23 06:15:24,359 - Model - INFO - Mean IoU of Area_5_hallway_4: 0.4651
+2021-03-23 06:20:05,846 - Model - INFO - Mean IoU of Area_5_storage_1: 0.3533
+2021-03-23 06:26:04,559 - Model - INFO - Mean IoU of Area_5_office_11: 0.6504
+2021-03-23 06:32:53,686 - Model - INFO - Mean IoU of Area_5_office_8: 0.6385
+2021-03-23 06:38:59,374 - Model - INFO - Mean IoU of Area_5_office_34: 0.6091
+2021-03-23 07:29:16,087 - Model - INFO - Mean IoU of Area_5_office_21: 0.6285
+2021-03-23 07:36:48,993 - Model - INFO - Mean IoU of Area_5_storage_2: 0.4328
+2021-03-23 07:40:48,011 - Model - INFO - Mean IoU of Area_5_office_9: 0.7193
+2021-03-23 07:42:18,540 - Model - INFO - Mean IoU of Area_5_office_30: 0.8101
+2021-03-23 07:49:04,036 - Model - INFO - Mean IoU of Area_5_hallway_11: 0.8154
+2021-03-23 08:41:39,690 - Model - INFO - Mean IoU of Area_5_conferenceRoom_2: 0.6049
+2021-03-23 08:45:25,196 - Model - INFO - Mean IoU of Area_5_office_5: 0.8352
+2021-03-23 08:52:35,233 - Model - INFO - Mean IoU of Area_5_hallway_12: 0.6916
+2021-03-23 08:59:25,839 - Model - INFO - Mean IoU of Area_5_office_7: 0.6239
+2021-03-23 09:01:18,197 - Model - INFO - Mean IoU of Area_5_hallway_8: 0.6584
+2021-03-23 09:07:32,416 - Model - INFO - Mean IoU of Area_5_office_33: 0.6477
+2021-03-23 09:29:47,860 - Model - INFO - Mean IoU of Area_5_hallway_9: 0.5687
+2021-03-23 10:52:54,462 - Model - INFO - Mean IoU of Area_5_office_38: 0.3907
+2021-03-23 11:24:48,653 - Model - INFO - Mean IoU of Area_5_hallway_10: 0.8178
+2021-03-23 11:38:13,811 - Model - INFO - Mean IoU of Area_5_office_13: 0.6312
+2021-03-23 11:45:05,688 - Model - INFO - Mean IoU of Area_5_WC_2: 0.7661
+2021-03-23 11:53:47,220 - Model - INFO - Mean IoU of Area_5_office_23: 0.6462
+2021-03-23 11:58:07,511 - Model - INFO - Mean IoU of Area_5_pantry_1: 0.7116
+2021-03-23 12:06:09,280 - Model - INFO - Mean IoU of Area_5_office_22: 0.6502
+2021-03-23 12:17:21,391 - Model - INFO - Mean IoU of Area_5_office_17: 0.5784
+2021-03-23 12:26:08,789 - Model - INFO - Mean IoU of Area_5_WC_1: 0.7258
+2021-03-23 12:34:21,005 - Model - INFO - Mean IoU of Area_5_office_2: 0.7176
+2021-03-23 14:53:38,858 - Model - INFO - Mean IoU of Area_5_hallway_5: 0.5241
+2021-03-23 15:09:10,854 - Model - INFO - Mean IoU of Area_5_conferenceRoom_1: 0.5500
+2021-03-23 15:41:32,436 - Model - INFO - Mean IoU of Area_5_hallway_15: 0.5137
+2021-03-23 15:49:09,586 - Model - INFO - Mean IoU of Area_5_office_16: 0.6031
+2021-03-23 16:13:21,947 - Model - INFO - Mean IoU of Area_5_hallway_7: 0.7646
+2021-03-23 16:35:25,771 - Model - INFO - Mean IoU of Area_5_hallway_3: 0.7395
+2021-03-23 16:35:42,819 - Model - INFO - Mean IoU of Area_5_storage_3: 0.5660
+2021-03-23 16:41:52,716 - Model - INFO - Mean IoU of Area_5_office_25: 0.6903
+2021-03-23 16:46:08,458 - Model - INFO - Mean IoU of Area_5_office_20: 0.5930
+2021-03-23 22:45:28,676 - Model - INFO - Mean IoU of Area_5_hallway_1: 0.3561
+2021-03-23 22:45:35,270 - Model - INFO - ------- IoU --------
+class ceiling , IoU: 0.894
+class floor , IoU: 0.977
+class wall , IoU: 0.754
+class beam , IoU: 0.000
+class column , IoU: 0.018
+class window , IoU: 0.583
+class door , IoU: 0.195
+class table , IoU: 0.692
+class chair , IoU: 0.790
+class sofa , IoU: 0.462
+class bookcase , IoU: 0.591
+class board , IoU: 0.587
+class clutter , IoU: 0.416
+
+2021-03-23 22:45:35,271 - Model - INFO - eval point avg class IoU: 0.535313
+2021-03-23 22:45:35,271 - Model - INFO - eval whole scene point avg class acc: 0.620289
+2021-03-23 22:45:35,271 - Model - INFO - eval whole scene point accuracy: 0.829893
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet2_sem_seg/logs/pointnet2_sem_seg.txt b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet2_sem_seg/logs/pointnet2_sem_seg.txt
new file mode 100644
index 0000000000..fadfdb1886
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet2_sem_seg/logs/pointnet2_sem_seg.txt
@@ -0,0 +1,1031 @@
+2021-03-20 23:41:43,195 - Model - INFO - PARAMETER ...
+2021-03-20 23:41:43,195 - Model - INFO - Namespace(batch_size=16, decay_rate=0.0001, epoch=128, gpu='0', learning_rate=0.001, log_dir='pointnet2_sem_seg', lr_decay=0.7, model='pointnet2_sem_seg', npoint=4096, optimizer='Adam', step_size=10, test_area=5)
+2021-03-21 01:22:39,196 - Model - INFO - The number of training data is: 47623
+2021-03-21 01:22:39,197 - Model - INFO - The number of test data is: 19191
+2021-03-21 01:22:39,263 - Model - INFO - No existing model, starting training from scratch...
+2021-03-21 01:22:39,266 - Model - INFO - **** Epoch 1 (1/128) ****
+2021-03-21 01:22:39,267 - Model - INFO - Learning rate:0.001000
+2021-03-21 02:50:07,309 - Model - INFO - Training mean loss: 0.817511
+2021-03-21 02:50:07,309 - Model - INFO - Training accuracy: 0.766915
+2021-03-21 02:50:07,310 - Model - INFO - Save model...
+2021-03-21 02:50:07,310 - Model - INFO - Saving at log/sem_seg/pointnet2_sem_seg/checkpoints/model.pth
+2021-03-21 02:50:07,356 - Model - INFO - Saving model....
+2021-03-21 02:50:07,992 - Model - INFO - ---- EPOCH 001 EVALUATION ----
+2021-03-21 03:44:13,933 - Model - INFO - eval mean loss: 0.767204
+2021-03-21 03:44:13,933 - Model - INFO - eval point avg class IoU: 0.445318
+2021-03-21 03:44:13,934 - Model - INFO - eval point accuracy: 0.780018
+2021-03-21 03:44:13,934 - Model - INFO - eval point avg class acc: 0.558046
+2021-03-21 03:44:13,934 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.885
+class floor weight: 0.200, IoU: 0.963
+class wall weight: 0.166, IoU: 0.684
+class beam weight: 0.279, IoU: 0.000
+class column weight: 0.000, IoU: 0.046
+class window weight: 0.019, IoU: 0.467
+class door weight: 0.033, IoU: 0.114
+class table weight: 0.030, IoU: 0.601
+class chair weight: 0.038, IoU: 0.489
+class sofa weight: 0.018, IoU: 0.251
+class bookcase weight: 0.003, IoU: 0.534
+class board weight: 0.110, IoU: 0.422
+class clutter weight: 0.012, IoU: 0.331
+
+2021-03-21 03:44:13,934 - Model - INFO - Eval mean loss: 0.767204
+2021-03-21 03:44:13,934 - Model - INFO - Eval accuracy: 0.780018
+2021-03-21 03:44:13,934 - Model - INFO - Save model...
+2021-03-21 03:44:13,934 - Model - INFO - Saving at log/sem_seg/pointnet2_sem_seg/checkpoints/best_model.pth
+2021-03-21 03:44:13,986 - Model - INFO - Saving model....
+2021-03-21 03:44:13,986 - Model - INFO - Best mIoU: 0.445318
+2021-03-21 03:44:13,986 - Model - INFO - **** Epoch 2 (2/128) ****
+2021-03-21 03:44:13,986 - Model - INFO - Learning rate:0.001000
+2021-03-21 05:12:45,311 - Model - INFO - Training mean loss: 0.539021
+2021-03-21 05:12:45,311 - Model - INFO - Training accuracy: 0.836284
+2021-03-21 05:12:45,886 - Model - INFO - ---- EPOCH 002 EVALUATION ----
+2021-03-21 06:01:39,219 - Model - INFO - eval mean loss: 0.767143
+2021-03-21 06:01:39,220 - Model - INFO - eval point avg class IoU: 0.483694
+2021-03-21 06:01:39,220 - Model - INFO - eval point accuracy: 0.791078
+2021-03-21 06:01:39,220 - Model - INFO - eval point avg class acc: 0.610968
+2021-03-21 06:01:39,220 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.894
+class floor weight: 0.201, IoU: 0.970
+class wall weight: 0.166, IoU: 0.676
+class beam weight: 0.280, IoU: 0.000
+class column weight: 0.000, IoU: 0.010
+class window weight: 0.018, IoU: 0.603
+class door weight: 0.035, IoU: 0.166
+class table weight: 0.030, IoU: 0.655
+class chair weight: 0.039, IoU: 0.711
+class sofa weight: 0.019, IoU: 0.289
+class bookcase weight: 0.003, IoU: 0.527
+class board weight: 0.107, IoU: 0.408
+class clutter weight: 0.011, IoU: 0.379
+
+2021-03-21 06:01:39,220 - Model - INFO - Eval mean loss: 0.767143
+2021-03-21 06:01:39,220 - Model - INFO - Eval accuracy: 0.791078
+2021-03-21 06:01:39,221 - Model - INFO - Save model...
+2021-03-21 06:01:39,221 - Model - INFO - Saving at log/sem_seg/pointnet2_sem_seg/checkpoints/best_model.pth
+2021-03-21 06:01:39,289 - Model - INFO - Saving model....
+2021-03-21 06:01:39,290 - Model - INFO - Best mIoU: 0.483694
+2021-03-21 06:01:39,290 - Model - INFO - **** Epoch 3 (3/128) ****
+2021-03-21 06:01:39,290 - Model - INFO - Learning rate:0.001000
+2021-03-21 07:29:28,683 - Model - INFO - Training mean loss: 0.459765
+2021-03-21 07:29:28,683 - Model - INFO - Training accuracy: 0.857295
+2021-03-21 07:29:29,299 - Model - INFO - ---- EPOCH 003 EVALUATION ----
+2021-03-21 08:19:32,746 - Model - INFO - eval mean loss: 0.725937
+2021-03-21 08:19:32,747 - Model - INFO - eval point avg class IoU: 0.494838
+2021-03-21 08:19:32,747 - Model - INFO - eval point accuracy: 0.807148
+2021-03-21 08:19:32,747 - Model - INFO - eval point avg class acc: 0.625430
+2021-03-21 08:19:32,747 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.089, IoU: 0.882
+class floor weight: 0.202, IoU: 0.973
+class wall weight: 0.169, IoU: 0.708
+class beam weight: 0.279, IoU: 0.000
+class column weight: 0.000, IoU: 0.046
+class window weight: 0.019, IoU: 0.550
+class door weight: 0.032, IoU: 0.211
+class table weight: 0.031, IoU: 0.655
+class chair weight: 0.038, IoU: 0.694
+class sofa weight: 0.020, IoU: 0.428
+class bookcase weight: 0.002, IoU: 0.590
+class board weight: 0.107, IoU: 0.317
+class clutter weight: 0.012, IoU: 0.381
+
+2021-03-21 08:19:32,748 - Model - INFO - Eval mean loss: 0.725937
+2021-03-21 08:19:32,748 - Model - INFO - Eval accuracy: 0.807148
+2021-03-21 08:19:32,748 - Model - INFO - Save model...
+2021-03-21 08:19:32,748 - Model - INFO - Saving at log/sem_seg/pointnet2_sem_seg/checkpoints/best_model.pth
+2021-03-21 08:19:32,810 - Model - INFO - Saving model....
+2021-03-21 08:19:32,810 - Model - INFO - Best mIoU: 0.494838
+2021-03-21 08:19:32,810 - Model - INFO - **** Epoch 4 (4/128) ****
+2021-03-21 08:19:32,810 - Model - INFO - Learning rate:0.001000
+2021-03-21 09:47:40,416 - Model - INFO - Training mean loss: 0.411783
+2021-03-21 09:47:40,417 - Model - INFO - Training accuracy: 0.870876
+2021-03-21 09:47:41,113 - Model - INFO - ---- EPOCH 004 EVALUATION ----
+2021-03-21 10:36:57,804 - Model - INFO - eval mean loss: 0.741349
+2021-03-21 10:36:57,804 - Model - INFO - eval point avg class IoU: 0.513180
+2021-03-21 10:36:57,804 - Model - INFO - eval point accuracy: 0.810367
+2021-03-21 10:36:57,805 - Model - INFO - eval point avg class acc: 0.630675
+2021-03-21 10:36:57,805 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.896
+class floor weight: 0.201, IoU: 0.972
+class wall weight: 0.166, IoU: 0.711
+class beam weight: 0.277, IoU: 0.000
+class column weight: 0.000, IoU: 0.054
+class window weight: 0.018, IoU: 0.536
+class door weight: 0.033, IoU: 0.241
+class table weight: 0.031, IoU: 0.621
+class chair weight: 0.039, IoU: 0.732
+class sofa weight: 0.020, IoU: 0.527
+class bookcase weight: 0.003, IoU: 0.580
+class board weight: 0.109, IoU: 0.407
+class clutter weight: 0.011, IoU: 0.394
+
+2021-03-21 10:36:57,805 - Model - INFO - Eval mean loss: 0.741349
+2021-03-21 10:36:57,805 - Model - INFO - Eval accuracy: 0.810367
+2021-03-21 10:36:57,805 - Model - INFO - Save model...
+2021-03-21 10:36:57,805 - Model - INFO - Saving at log/sem_seg/pointnet2_sem_seg/checkpoints/best_model.pth
+2021-03-21 10:36:57,864 - Model - INFO - Saving model....
+2021-03-21 10:36:57,864 - Model - INFO - Best mIoU: 0.513180
+2021-03-21 10:36:57,865 - Model - INFO - **** Epoch 5 (5/128) ****
+2021-03-21 10:36:57,865 - Model - INFO - Learning rate:0.001000
+2021-03-21 12:06:38,858 - Model - INFO - PARAMETER ...
+2021-03-21 12:06:38,858 - Model - INFO - Namespace(batch_size=16, decay_rate=0.0001, epoch=128, gpu='0', learning_rate=0.001, log_dir='pointnet2_sem_seg', lr_decay=0.7, model='pointnet2_sem_seg', npoint=4096, optimizer='Adam', step_size=10, test_area=5)
+2021-03-21 15:23:18,178 - Model - INFO - The number of training data is: 47623
+2021-03-21 15:23:18,183 - Model - INFO - The number of test data is: 19191
+2021-03-21 15:23:18,541 - Model - INFO - Use pretrain model
+2021-03-21 15:23:18,542 - Model - INFO - **** Epoch 1 (4/128) ****
+2021-03-21 15:23:18,543 - Model - INFO - Learning rate:0.001000
+2021-03-21 16:03:43,563 - Model - INFO - Training mean loss: 0.374252
+2021-03-21 16:03:43,564 - Model - INFO - Training accuracy: 0.881541
+2021-03-21 16:03:44,232 - Model - INFO - ---- EPOCH 001 EVALUATION ----
+2021-03-21 16:40:26,927 - Model - INFO - eval mean loss: 0.770434
+2021-03-21 16:40:26,928 - Model - INFO - eval point avg class IoU: 0.494568
+2021-03-21 16:40:26,928 - Model - INFO - eval point accuracy: 0.805591
+2021-03-21 16:40:26,928 - Model - INFO - eval point avg class acc: 0.622874
+2021-03-21 16:40:26,929 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.900
+class floor weight: 0.199, IoU: 0.970
+class wall weight: 0.166, IoU: 0.716
+class beam weight: 0.282, IoU: 0.000
+class column weight: 0.000, IoU: 0.078
+class window weight: 0.018, IoU: 0.588
+class door weight: 0.033, IoU: 0.197
+class table weight: 0.029, IoU: 0.651
+class chair weight: 0.039, IoU: 0.632
+class sofa weight: 0.019, IoU: 0.271
+class bookcase weight: 0.002, IoU: 0.526
+class board weight: 0.109, IoU: 0.506
+class clutter weight: 0.012, IoU: 0.394
+
+2021-03-21 16:40:26,929 - Model - INFO - Eval mean loss: 0.770434
+2021-03-21 16:40:26,929 - Model - INFO - Eval accuracy: 0.805591
+2021-03-21 16:40:26,929 - Model - INFO - Save model...
+2021-03-21 16:40:26,930 - Model - INFO - Saving at log/sem_seg/pointnet2_sem_seg/checkpoints/best_model.pth
+2021-03-21 16:40:26,990 - Model - INFO - Saving model....
+2021-03-21 16:40:26,990 - Model - INFO - Best mIoU: 0.494568
+2021-03-21 16:40:26,990 - Model - INFO - **** Epoch 2 (5/128) ****
+2021-03-21 16:40:26,990 - Model - INFO - Learning rate:0.001000
+2021-03-21 18:22:20,579 - Model - INFO - Training mean loss: 0.350288
+2021-03-21 18:22:20,583 - Model - INFO - Training accuracy: 0.888712
+2021-03-21 18:22:21,262 - Model - INFO - ---- EPOCH 002 EVALUATION ----
+2021-03-21 18:59:47,920 - Model - INFO - eval mean loss: 0.785631
+2021-03-21 18:59:47,921 - Model - INFO - eval point avg class IoU: 0.511625
+2021-03-21 18:59:47,921 - Model - INFO - eval point accuracy: 0.797537
+2021-03-21 18:59:47,921 - Model - INFO - eval point avg class acc: 0.608187
+2021-03-21 18:59:47,921 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.885
+class floor weight: 0.199, IoU: 0.975
+class wall weight: 0.166, IoU: 0.683
+class beam weight: 0.283, IoU: 0.000
+class column weight: 0.000, IoU: 0.109
+class window weight: 0.017, IoU: 0.550
+class door weight: 0.036, IoU: 0.232
+class table weight: 0.029, IoU: 0.668
+class chair weight: 0.039, IoU: 0.729
+class sofa weight: 0.018, IoU: 0.286
+class bookcase weight: 0.002, IoU: 0.574
+class board weight: 0.108, IoU: 0.552
+class clutter weight: 0.011, IoU: 0.408
+
+2021-03-21 18:59:47,921 - Model - INFO - Eval mean loss: 0.785631
+2021-03-21 18:59:47,921 - Model - INFO - Eval accuracy: 0.797537
+2021-03-21 18:59:47,921 - Model - INFO - Save model...
+2021-03-21 18:59:47,921 - Model - INFO - Saving at log/sem_seg/pointnet2_sem_seg/checkpoints/best_model.pth
+2021-03-21 18:59:47,975 - Model - INFO - Saving model....
+2021-03-21 18:59:47,975 - Model - INFO - Best mIoU: 0.511625
+2021-03-21 18:59:47,975 - Model - INFO - **** Epoch 3 (6/128) ****
+2021-03-21 18:59:47,975 - Model - INFO - Learning rate:0.001000
+2021-03-21 20:39:51,435 - Model - INFO - Training mean loss: 0.331193
+2021-03-21 20:39:51,436 - Model - INFO - Training accuracy: 0.893792
+2021-03-21 20:39:51,436 - Model - INFO - Save model...
+2021-03-21 20:39:51,436 - Model - INFO - Saving at log/sem_seg/pointnet2_sem_seg/checkpoints/model.pth
+2021-03-21 20:39:51,490 - Model - INFO - Saving model....
+2021-03-21 20:39:52,149 - Model - INFO - ---- EPOCH 003 EVALUATION ----
+2021-03-21 21:20:36,332 - Model - INFO - eval mean loss: 0.837822
+2021-03-21 21:20:36,332 - Model - INFO - eval point avg class IoU: 0.484818
+2021-03-21 21:20:36,332 - Model - INFO - eval point accuracy: 0.787165
+2021-03-21 21:20:36,333 - Model - INFO - eval point avg class acc: 0.605151
+2021-03-21 21:20:36,333 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.866
+class floor weight: 0.200, IoU: 0.971
+class wall weight: 0.167, IoU: 0.681
+class beam weight: 0.282, IoU: 0.000
+class column weight: 0.000, IoU: 0.084
+class window weight: 0.018, IoU: 0.564
+class door weight: 0.035, IoU: 0.120
+class table weight: 0.028, IoU: 0.668
+class chair weight: 0.039, IoU: 0.679
+class sofa weight: 0.019, IoU: 0.426
+class bookcase weight: 0.003, IoU: 0.519
+class board weight: 0.107, IoU: 0.346
+class clutter weight: 0.011, IoU: 0.378
+
+2021-03-21 21:20:36,333 - Model - INFO - Eval mean loss: 0.837822
+2021-03-21 21:20:36,333 - Model - INFO - Eval accuracy: 0.787165
+2021-03-21 21:20:36,333 - Model - INFO - Best mIoU: 0.511625
+2021-03-21 21:20:36,334 - Model - INFO - **** Epoch 4 (7/128) ****
+2021-03-21 21:20:36,334 - Model - INFO - Learning rate:0.001000
+2021-03-21 23:03:44,142 - Model - INFO - Training mean loss: 0.316568
+2021-03-21 23:03:44,142 - Model - INFO - Training accuracy: 0.898078
+2021-03-21 23:03:44,877 - Model - INFO - ---- EPOCH 004 EVALUATION ----
+2021-03-21 23:44:08,056 - Model - INFO - eval mean loss: 0.952962
+2021-03-21 23:44:08,057 - Model - INFO - eval point avg class IoU: 0.488863
+2021-03-21 23:44:08,057 - Model - INFO - eval point accuracy: 0.796450
+2021-03-21 23:44:08,057 - Model - INFO - eval point avg class acc: 0.597764
+2021-03-21 23:44:08,057 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.898
+class floor weight: 0.200, IoU: 0.975
+class wall weight: 0.166, IoU: 0.700
+class beam weight: 0.280, IoU: 0.000
+class column weight: 0.000, IoU: 0.052
+class window weight: 0.018, IoU: 0.609
+class door weight: 0.034, IoU: 0.092
+class table weight: 0.030, IoU: 0.678
+class chair weight: 0.040, IoU: 0.705
+class sofa weight: 0.019, IoU: 0.449
+class bookcase weight: 0.003, IoU: 0.450
+class board weight: 0.108, IoU: 0.354
+class clutter weight: 0.012, IoU: 0.394
+
+2021-03-21 23:44:08,058 - Model - INFO - Eval mean loss: 0.952962
+2021-03-21 23:44:08,058 - Model - INFO - Eval accuracy: 0.796450
+2021-03-21 23:44:08,058 - Model - INFO - Best mIoU: 0.511625
+2021-03-21 23:44:08,058 - Model - INFO - **** Epoch 5 (8/128) ****
+2021-03-21 23:44:08,058 - Model - INFO - Learning rate:0.001000
+2021-03-22 01:29:35,271 - Model - INFO - Training mean loss: 0.305775
+2021-03-22 01:29:35,271 - Model - INFO - Training accuracy: 0.901324
+2021-03-22 01:29:36,206 - Model - INFO - ---- EPOCH 005 EVALUATION ----
+2021-03-22 02:08:47,028 - Model - INFO - eval mean loss: 0.899594
+2021-03-22 02:08:47,028 - Model - INFO - eval point avg class IoU: 0.470350
+2021-03-22 02:08:47,028 - Model - INFO - eval point accuracy: 0.793260
+2021-03-22 02:08:47,029 - Model - INFO - eval point avg class acc: 0.585985
+2021-03-22 02:08:47,029 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.898
+class floor weight: 0.199, IoU: 0.975
+class wall weight: 0.165, IoU: 0.698
+class beam weight: 0.282, IoU: 0.000
+class column weight: 0.000, IoU: 0.116
+class window weight: 0.018, IoU: 0.480
+class door weight: 0.035, IoU: 0.164
+class table weight: 0.029, IoU: 0.636
+class chair weight: 0.039, IoU: 0.562
+class sofa weight: 0.017, IoU: 0.181
+class bookcase weight: 0.002, IoU: 0.519
+class board weight: 0.110, IoU: 0.505
+class clutter weight: 0.013, IoU: 0.380
+
+2021-03-22 02:08:47,029 - Model - INFO - Eval mean loss: 0.899594
+2021-03-22 02:08:47,030 - Model - INFO - Eval accuracy: 0.793260
+2021-03-22 02:08:47,030 - Model - INFO - Best mIoU: 0.511625
+2021-03-22 02:08:47,030 - Model - INFO - **** Epoch 6 (9/128) ****
+2021-03-22 02:08:47,030 - Model - INFO - Learning rate:0.001000
+2021-03-22 03:47:20,293 - Model - INFO - Training mean loss: 0.294645
+2021-03-22 03:47:20,294 - Model - INFO - Training accuracy: 0.903947
+2021-03-22 03:47:21,186 - Model - INFO - ---- EPOCH 006 EVALUATION ----
+2021-03-22 04:28:00,256 - Model - INFO - eval mean loss: 0.918813
+2021-03-22 04:28:00,256 - Model - INFO - eval point avg class IoU: 0.499807
+2021-03-22 04:28:00,256 - Model - INFO - eval point accuracy: 0.803368
+2021-03-22 04:28:00,256 - Model - INFO - eval point avg class acc: 0.597059
+2021-03-22 04:28:00,257 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.890
+class floor weight: 0.200, IoU: 0.956
+class wall weight: 0.167, IoU: 0.734
+class beam weight: 0.277, IoU: 0.000
+class column weight: 0.000, IoU: 0.090
+class window weight: 0.019, IoU: 0.627
+class door weight: 0.036, IoU: 0.137
+class table weight: 0.030, IoU: 0.642
+class chair weight: 0.039, IoU: 0.697
+class sofa weight: 0.019, IoU: 0.384
+class bookcase weight: 0.003, IoU: 0.457
+class board weight: 0.109, IoU: 0.520
+class clutter weight: 0.011, IoU: 0.363
+
+2021-03-22 04:28:00,257 - Model - INFO - Eval mean loss: 0.918813
+2021-03-22 04:28:00,257 - Model - INFO - Eval accuracy: 0.803368
+2021-03-22 04:28:00,257 - Model - INFO - Best mIoU: 0.511625
+2021-03-22 04:28:00,257 - Model - INFO - **** Epoch 7 (10/128) ****
+2021-03-22 04:28:00,257 - Model - INFO - Learning rate:0.001000
+2021-03-22 06:13:30,051 - Model - INFO - Training mean loss: 0.288375
+2021-03-22 06:13:30,051 - Model - INFO - Training accuracy: 0.906608
+2021-03-22 06:13:30,817 - Model - INFO - ---- EPOCH 007 EVALUATION ----
+2021-03-22 06:51:47,755 - Model - INFO - eval mean loss: 0.828517
+2021-03-22 06:51:47,755 - Model - INFO - eval point avg class IoU: 0.487858
+2021-03-22 06:51:47,756 - Model - INFO - eval point accuracy: 0.785244
+2021-03-22 06:51:47,756 - Model - INFO - eval point avg class acc: 0.605936
+2021-03-22 06:51:47,756 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.093, IoU: 0.896
+class floor weight: 0.202, IoU: 0.963
+class wall weight: 0.167, IoU: 0.665
+class beam weight: 0.282, IoU: 0.000
+class column weight: 0.000, IoU: 0.106
+class window weight: 0.018, IoU: 0.479
+class door weight: 0.032, IoU: 0.154
+class table weight: 0.029, IoU: 0.664
+class chair weight: 0.040, IoU: 0.690
+class sofa weight: 0.018, IoU: 0.345
+class bookcase weight: 0.003, IoU: 0.569
+class board weight: 0.106, IoU: 0.444
+class clutter weight: 0.012, IoU: 0.368
+
+2021-03-22 06:51:47,757 - Model - INFO - Eval mean loss: 0.828517
+2021-03-22 06:51:47,757 - Model - INFO - Eval accuracy: 0.785244
+2021-03-22 06:51:47,757 - Model - INFO - Best mIoU: 0.511625
+2021-03-22 06:51:47,757 - Model - INFO - **** Epoch 8 (11/128) ****
+2021-03-22 06:51:47,757 - Model - INFO - Learning rate:0.000700
+2021-03-22 08:33:10,647 - Model - INFO - Training mean loss: 0.250363
+2021-03-22 08:33:10,647 - Model - INFO - Training accuracy: 0.917025
+2021-03-22 08:33:10,648 - Model - INFO - Save model...
+2021-03-22 08:33:10,648 - Model - INFO - Saving at log/sem_seg/pointnet2_sem_seg/checkpoints/model.pth
+2021-03-22 08:33:10,720 - Model - INFO - Saving model....
+2021-03-22 08:33:11,495 - Model - INFO - ---- EPOCH 008 EVALUATION ----
+2021-03-22 09:12:29,103 - Model - INFO - eval mean loss: 0.834687
+2021-03-22 09:12:29,103 - Model - INFO - eval point avg class IoU: 0.524558
+2021-03-22 09:12:29,103 - Model - INFO - eval point accuracy: 0.822807
+2021-03-22 09:12:29,104 - Model - INFO - eval point avg class acc: 0.617383
+2021-03-22 09:12:29,104 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.896
+class floor weight: 0.201, IoU: 0.972
+class wall weight: 0.167, IoU: 0.745
+class beam weight: 0.282, IoU: 0.000
+class column weight: 0.000, IoU: 0.027
+class window weight: 0.016, IoU: 0.581
+class door weight: 0.031, IoU: 0.223
+class table weight: 0.032, IoU: 0.668
+class chair weight: 0.039, IoU: 0.741
+class sofa weight: 0.019, IoU: 0.439
+class bookcase weight: 0.003, IoU: 0.576
+class board weight: 0.108, IoU: 0.552
+class clutter weight: 0.012, IoU: 0.401
+
+2021-03-22 09:12:29,104 - Model - INFO - Eval mean loss: 0.834687
+2021-03-22 09:12:29,105 - Model - INFO - Eval accuracy: 0.822807
+2021-03-22 09:12:29,105 - Model - INFO - Save model...
+2021-03-22 09:12:29,105 - Model - INFO - Saving at log/sem_seg/pointnet2_sem_seg/checkpoints/best_model.pth
+2021-03-22 09:12:29,184 - Model - INFO - Saving model....
+2021-03-22 09:12:29,184 - Model - INFO - Best mIoU: 0.524558
+2021-03-22 09:12:29,184 - Model - INFO - **** Epoch 9 (12/128) ****
+2021-03-22 09:12:29,184 - Model - INFO - Learning rate:0.000700
+2021-03-22 10:56:12,976 - Model - INFO - Training mean loss: 0.242814
+2021-03-22 10:56:12,976 - Model - INFO - Training accuracy: 0.919146
+2021-03-22 10:56:13,638 - Model - INFO - ---- EPOCH 009 EVALUATION ----
+2021-03-22 11:17:11,286 - Model - INFO - eval mean loss: 0.931769
+2021-03-22 11:17:11,286 - Model - INFO - eval point avg class IoU: 0.493690
+2021-03-22 11:17:11,286 - Model - INFO - eval point accuracy: 0.818041
+2021-03-22 11:17:11,286 - Model - INFO - eval point avg class acc: 0.584716
+2021-03-22 11:17:11,286 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.905
+class floor weight: 0.202, IoU: 0.972
+class wall weight: 0.169, IoU: 0.728
+class beam weight: 0.276, IoU: 0.000
+class column weight: 0.000, IoU: 0.064
+class window weight: 0.018, IoU: 0.558
+class door weight: 0.034, IoU: 0.098
+class table weight: 0.031, IoU: 0.674
+class chair weight: 0.039, IoU: 0.747
+class sofa weight: 0.019, IoU: 0.230
+class bookcase weight: 0.003, IoU: 0.566
+class board weight: 0.107, IoU: 0.468
+class clutter weight: 0.012, IoU: 0.407
+
+2021-03-22 11:17:11,287 - Model - INFO - Eval mean loss: 0.931769
+2021-03-22 11:17:11,287 - Model - INFO - Eval accuracy: 0.818041
+2021-03-22 11:17:11,287 - Model - INFO - Best mIoU: 0.524558
+2021-03-22 11:17:11,287 - Model - INFO - **** Epoch 10 (13/128) ****
+2021-03-22 11:17:11,287 - Model - INFO - Learning rate:0.000700
+2021-03-22 11:56:27,101 - Model - INFO - Training mean loss: 0.239647
+2021-03-22 11:56:27,102 - Model - INFO - Training accuracy: 0.920378
+2021-03-22 11:56:27,825 - Model - INFO - ---- EPOCH 010 EVALUATION ----
+2021-03-22 12:11:41,200 - Model - INFO - eval mean loss: 0.914079
+2021-03-22 12:11:41,200 - Model - INFO - eval point avg class IoU: 0.506630
+2021-03-22 12:11:41,200 - Model - INFO - eval point accuracy: 0.813612
+2021-03-22 12:11:41,200 - Model - INFO - eval point avg class acc: 0.615905
+2021-03-22 12:11:41,201 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.910
+class floor weight: 0.199, IoU: 0.966
+class wall weight: 0.166, IoU: 0.734
+class beam weight: 0.278, IoU: 0.000
+class column weight: 0.000, IoU: 0.081
+class window weight: 0.018, IoU: 0.554
+class door weight: 0.035, IoU: 0.196
+class table weight: 0.030, IoU: 0.654
+class chair weight: 0.040, IoU: 0.665
+class sofa weight: 0.019, IoU: 0.388
+class bookcase weight: 0.003, IoU: 0.501
+class board weight: 0.110, IoU: 0.516
+class clutter weight: 0.011, IoU: 0.422
+
+2021-03-22 12:11:41,201 - Model - INFO - Eval mean loss: 0.914079
+2021-03-22 12:11:41,201 - Model - INFO - Eval accuracy: 0.813612
+2021-03-22 12:11:41,201 - Model - INFO - Best mIoU: 0.524558
+2021-03-22 12:11:41,201 - Model - INFO - **** Epoch 11 (14/128) ****
+2021-03-22 12:11:41,201 - Model - INFO - Learning rate:0.000700
+2021-03-22 12:51:28,081 - Model - INFO - Training mean loss: 0.234682
+2021-03-22 12:51:28,082 - Model - INFO - Training accuracy: 0.921814
+2021-03-22 12:51:28,819 - Model - INFO - ---- EPOCH 011 EVALUATION ----
+2021-03-22 13:06:40,492 - Model - INFO - eval mean loss: 0.832918
+2021-03-22 13:06:40,493 - Model - INFO - eval point avg class IoU: 0.498641
+2021-03-22 13:06:40,493 - Model - INFO - eval point accuracy: 0.808078
+2021-03-22 13:06:40,493 - Model - INFO - eval point avg class acc: 0.627161
+2021-03-22 13:06:40,493 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.876
+class floor weight: 0.201, IoU: 0.974
+class wall weight: 0.169, IoU: 0.714
+class beam weight: 0.277, IoU: 0.000
+class column weight: 0.000, IoU: 0.075
+class window weight: 0.017, IoU: 0.575
+class door weight: 0.032, IoU: 0.121
+class table weight: 0.031, IoU: 0.663
+class chair weight: 0.040, IoU: 0.671
+class sofa weight: 0.019, IoU: 0.321
+class bookcase weight: 0.002, IoU: 0.594
+class board weight: 0.107, IoU: 0.499
+class clutter weight: 0.012, IoU: 0.400
+
+2021-03-22 13:06:40,493 - Model - INFO - Eval mean loss: 0.832918
+2021-03-22 13:06:40,493 - Model - INFO - Eval accuracy: 0.808078
+2021-03-22 13:06:40,493 - Model - INFO - Best mIoU: 0.524558
+2021-03-22 13:06:40,494 - Model - INFO - **** Epoch 12 (15/128) ****
+2021-03-22 13:06:40,494 - Model - INFO - Learning rate:0.000700
+2021-03-22 13:46:38,964 - Model - INFO - Training mean loss: 0.231733
+2021-03-22 13:46:38,964 - Model - INFO - Training accuracy: 0.922490
+2021-03-22 13:46:39,695 - Model - INFO - ---- EPOCH 012 EVALUATION ----
+2021-03-22 14:03:25,252 - Model - INFO - eval mean loss: 0.811755
+2021-03-22 14:03:25,253 - Model - INFO - eval point avg class IoU: 0.526403
+2021-03-22 14:03:25,253 - Model - INFO - eval point accuracy: 0.827060
+2021-03-22 14:03:25,253 - Model - INFO - eval point avg class acc: 0.636744
+2021-03-22 14:03:25,253 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.907
+class floor weight: 0.202, IoU: 0.963
+class wall weight: 0.169, IoU: 0.742
+class beam weight: 0.277, IoU: 0.000
+class column weight: 0.000, IoU: 0.078
+class window weight: 0.017, IoU: 0.575
+class door weight: 0.033, IoU: 0.234
+class table weight: 0.030, IoU: 0.666
+class chair weight: 0.038, IoU: 0.704
+class sofa weight: 0.019, IoU: 0.420
+class bookcase weight: 0.003, IoU: 0.610
+class board weight: 0.108, IoU: 0.533
+class clutter weight: 0.011, IoU: 0.412
+
+2021-03-22 14:03:25,253 - Model - INFO - Eval mean loss: 0.811755
+2021-03-22 14:03:25,253 - Model - INFO - Eval accuracy: 0.827060
+2021-03-22 14:03:25,253 - Model - INFO - Save model...
+2021-03-22 14:03:25,254 - Model - INFO - Saving at log/sem_seg/pointnet2_sem_seg/checkpoints/best_model.pth
+2021-03-22 14:03:25,319 - Model - INFO - Saving model....
+2021-03-22 14:03:25,319 - Model - INFO - Best mIoU: 0.526403
+2021-03-22 14:03:25,319 - Model - INFO - **** Epoch 13 (16/128) ****
+2021-03-22 14:03:25,319 - Model - INFO - Learning rate:0.000700
+2021-03-22 14:48:06,133 - Model - INFO - Training mean loss: 0.226651
+2021-03-22 14:48:06,134 - Model - INFO - Training accuracy: 0.923769
+2021-03-22 14:48:06,134 - Model - INFO - Save model...
+2021-03-22 14:48:06,134 - Model - INFO - Saving at log/sem_seg/pointnet2_sem_seg/checkpoints/model.pth
+2021-03-22 14:48:06,203 - Model - INFO - Saving model....
+2021-03-22 14:48:06,991 - Model - INFO - ---- EPOCH 013 EVALUATION ----
+2021-03-22 15:04:27,679 - Model - INFO - eval mean loss: 0.837565
+2021-03-22 15:04:27,680 - Model - INFO - eval point avg class IoU: 0.516057
+2021-03-22 15:04:27,680 - Model - INFO - eval point accuracy: 0.821471
+2021-03-22 15:04:27,680 - Model - INFO - eval point avg class acc: 0.620027
+2021-03-22 15:04:27,680 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.898
+class floor weight: 0.201, IoU: 0.970
+class wall weight: 0.168, IoU: 0.744
+class beam weight: 0.280, IoU: 0.000
+class column weight: 0.000, IoU: 0.076
+class window weight: 0.017, IoU: 0.510
+class door weight: 0.033, IoU: 0.182
+class table weight: 0.030, IoU: 0.660
+class chair weight: 0.039, IoU: 0.645
+class sofa weight: 0.018, IoU: 0.482
+class bookcase weight: 0.003, IoU: 0.595
+class board weight: 0.109, IoU: 0.539
+class clutter weight: 0.011, IoU: 0.408
+
+2021-03-22 15:04:27,680 - Model - INFO - Eval mean loss: 0.837565
+2021-03-22 15:04:27,680 - Model - INFO - Eval accuracy: 0.821471
+2021-03-22 15:04:27,680 - Model - INFO - Best mIoU: 0.526403
+2021-03-22 15:04:27,681 - Model - INFO - **** Epoch 14 (17/128) ****
+2021-03-22 15:04:27,681 - Model - INFO - Learning rate:0.000700
+2021-03-22 15:50:04,524 - Model - INFO - Training mean loss: 0.225915
+2021-03-22 15:50:04,525 - Model - INFO - Training accuracy: 0.924563
+2021-03-22 15:50:05,393 - Model - INFO - ---- EPOCH 014 EVALUATION ----
+2021-03-22 16:07:32,755 - Model - INFO - eval mean loss: 0.884040
+2021-03-22 16:07:32,756 - Model - INFO - eval point avg class IoU: 0.507686
+2021-03-22 16:07:32,756 - Model - INFO - eval point accuracy: 0.817938
+2021-03-22 16:07:32,756 - Model - INFO - eval point avg class acc: 0.619410
+2021-03-22 16:07:32,756 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.902
+class floor weight: 0.202, IoU: 0.978
+class wall weight: 0.169, IoU: 0.726
+class beam weight: 0.278, IoU: 0.000
+class column weight: 0.000, IoU: 0.076
+class window weight: 0.018, IoU: 0.561
+class door weight: 0.033, IoU: 0.105
+class table weight: 0.030, IoU: 0.667
+class chair weight: 0.039, IoU: 0.727
+class sofa weight: 0.019, IoU: 0.416
+class bookcase weight: 0.003, IoU: 0.583
+class board weight: 0.109, IoU: 0.463
+class clutter weight: 0.011, IoU: 0.396
+
+2021-03-22 16:07:32,757 - Model - INFO - Eval mean loss: 0.884040
+2021-03-22 16:07:32,757 - Model - INFO - Eval accuracy: 0.817938
+2021-03-22 16:07:32,757 - Model - INFO - Best mIoU: 0.526403
+2021-03-22 16:07:32,757 - Model - INFO - **** Epoch 15 (18/128) ****
+2021-03-22 16:07:32,757 - Model - INFO - Learning rate:0.000700
+2021-03-22 16:53:08,442 - Model - INFO - Training mean loss: 0.223656
+2021-03-22 16:53:08,443 - Model - INFO - Training accuracy: 0.925109
+2021-03-22 16:53:09,237 - Model - INFO - ---- EPOCH 015 EVALUATION ----
+2021-03-22 17:10:17,334 - Model - INFO - eval mean loss: 0.823627
+2021-03-22 17:10:17,335 - Model - INFO - eval point avg class IoU: 0.520244
+2021-03-22 17:10:17,335 - Model - INFO - eval point accuracy: 0.822956
+2021-03-22 17:10:17,335 - Model - INFO - eval point avg class acc: 0.621941
+2021-03-22 17:10:17,335 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.904
+class floor weight: 0.200, IoU: 0.974
+class wall weight: 0.166, IoU: 0.735
+class beam weight: 0.278, IoU: 0.000
+class column weight: 0.000, IoU: 0.087
+class window weight: 0.018, IoU: 0.601
+class door weight: 0.033, IoU: 0.215
+class table weight: 0.032, IoU: 0.646
+class chair weight: 0.039, IoU: 0.714
+class sofa weight: 0.019, IoU: 0.368
+class bookcase weight: 0.003, IoU: 0.591
+class board weight: 0.109, IoU: 0.517
+class clutter weight: 0.012, IoU: 0.411
+
+2021-03-22 17:10:17,335 - Model - INFO - Eval mean loss: 0.823627
+2021-03-22 17:10:17,336 - Model - INFO - Eval accuracy: 0.822956
+2021-03-22 17:10:17,336 - Model - INFO - Best mIoU: 0.526403
+2021-03-22 17:10:17,336 - Model - INFO - **** Epoch 16 (19/128) ****
+2021-03-22 17:10:17,336 - Model - INFO - Learning rate:0.000700
+2021-03-22 17:55:50,208 - Model - INFO - Training mean loss: 0.221048
+2021-03-22 17:55:50,208 - Model - INFO - Training accuracy: 0.925872
+2021-03-22 17:55:50,999 - Model - INFO - ---- EPOCH 016 EVALUATION ----
+2021-03-22 18:13:31,558 - Model - INFO - eval mean loss: 1.000570
+2021-03-22 18:13:31,559 - Model - INFO - eval point avg class IoU: 0.495087
+2021-03-22 18:13:31,559 - Model - INFO - eval point accuracy: 0.812642
+2021-03-22 18:13:31,559 - Model - INFO - eval point avg class acc: 0.590352
+2021-03-22 18:13:31,559 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.093, IoU: 0.894
+class floor weight: 0.199, IoU: 0.968
+class wall weight: 0.166, IoU: 0.736
+class beam weight: 0.280, IoU: 0.000
+class column weight: 0.000, IoU: 0.070
+class window weight: 0.018, IoU: 0.476
+class door weight: 0.034, IoU: 0.172
+class table weight: 0.029, IoU: 0.641
+class chair weight: 0.040, IoU: 0.667
+class sofa weight: 0.019, IoU: 0.343
+class bookcase weight: 0.003, IoU: 0.550
+class board weight: 0.107, IoU: 0.506
+class clutter weight: 0.012, IoU: 0.411
+
+2021-03-22 18:13:31,559 - Model - INFO - Eval mean loss: 1.000570
+2021-03-22 18:13:31,559 - Model - INFO - Eval accuracy: 0.812642
+2021-03-22 18:13:31,559 - Model - INFO - Best mIoU: 0.526403
+2021-03-22 18:13:31,560 - Model - INFO - **** Epoch 17 (20/128) ****
+2021-03-22 18:13:31,560 - Model - INFO - Learning rate:0.000700
+2021-03-22 18:59:54,479 - Model - INFO - Training mean loss: 0.216674
+2021-03-22 18:59:54,479 - Model - INFO - Training accuracy: 0.926990
+2021-03-22 18:59:55,353 - Model - INFO - ---- EPOCH 017 EVALUATION ----
+2021-03-22 19:17:31,641 - Model - INFO - eval mean loss: 0.980681
+2021-03-22 19:17:31,641 - Model - INFO - eval point avg class IoU: 0.496017
+2021-03-22 19:17:31,641 - Model - INFO - eval point accuracy: 0.815693
+2021-03-22 19:17:31,641 - Model - INFO - eval point avg class acc: 0.597266
+2021-03-22 19:17:31,642 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.893
+class floor weight: 0.200, IoU: 0.967
+class wall weight: 0.166, IoU: 0.721
+class beam weight: 0.281, IoU: 0.000
+class column weight: 0.000, IoU: 0.056
+class window weight: 0.019, IoU: 0.538
+class door weight: 0.033, IoU: 0.136
+class table weight: 0.030, IoU: 0.673
+class chair weight: 0.039, IoU: 0.668
+class sofa weight: 0.018, IoU: 0.275
+class bookcase weight: 0.002, IoU: 0.590
+class board weight: 0.109, IoU: 0.517
+class clutter weight: 0.012, IoU: 0.415
+
+2021-03-22 19:17:31,642 - Model - INFO - Eval mean loss: 0.980681
+2021-03-22 19:17:31,642 - Model - INFO - Eval accuracy: 0.815693
+2021-03-22 19:17:31,642 - Model - INFO - Best mIoU: 0.526403
+2021-03-22 19:17:31,642 - Model - INFO - **** Epoch 18 (21/128) ****
+2021-03-22 19:17:31,642 - Model - INFO - Learning rate:0.000490
+2021-03-22 20:02:53,291 - Model - INFO - Training mean loss: 0.194119
+2021-03-22 20:02:53,292 - Model - INFO - Training accuracy: 0.933500
+2021-03-22 20:02:53,292 - Model - INFO - Save model...
+2021-03-22 20:02:53,292 - Model - INFO - Saving at log/sem_seg/pointnet2_sem_seg/checkpoints/model.pth
+2021-03-22 20:02:53,356 - Model - INFO - Saving model....
+2021-03-22 20:02:54,107 - Model - INFO - ---- EPOCH 018 EVALUATION ----
+2021-03-22 20:20:21,767 - Model - INFO - eval mean loss: 0.943453
+2021-03-22 20:20:21,767 - Model - INFO - eval point avg class IoU: 0.515095
+2021-03-22 20:20:21,767 - Model - INFO - eval point accuracy: 0.821974
+2021-03-22 20:20:21,767 - Model - INFO - eval point avg class acc: 0.610816
+2021-03-22 20:20:21,767 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.904
+class floor weight: 0.200, IoU: 0.973
+class wall weight: 0.167, IoU: 0.744
+class beam weight: 0.279, IoU: 0.000
+class column weight: 0.000, IoU: 0.031
+class window weight: 0.018, IoU: 0.589
+class door weight: 0.033, IoU: 0.266
+class table weight: 0.032, IoU: 0.639
+class chair weight: 0.039, IoU: 0.744
+class sofa weight: 0.019, IoU: 0.360
+class bookcase weight: 0.003, IoU: 0.555
+class board weight: 0.109, IoU: 0.483
+class clutter weight: 0.011, IoU: 0.407
+
+2021-03-22 20:20:21,768 - Model - INFO - Eval mean loss: 0.943453
+2021-03-22 20:20:21,768 - Model - INFO - Eval accuracy: 0.821974
+2021-03-22 20:20:21,768 - Model - INFO - Best mIoU: 0.526403
+2021-03-22 20:20:21,768 - Model - INFO - **** Epoch 19 (22/128) ****
+2021-03-22 20:20:21,768 - Model - INFO - Learning rate:0.000490
+2021-03-22 21:06:01,258 - Model - INFO - Training mean loss: 0.188554
+2021-03-22 21:06:01,259 - Model - INFO - Training accuracy: 0.935240
+2021-03-22 21:06:02,055 - Model - INFO - ---- EPOCH 019 EVALUATION ----
+2021-03-22 21:23:03,030 - Model - INFO - eval mean loss: 0.945076
+2021-03-22 21:23:03,038 - Model - INFO - eval point avg class IoU: 0.515003
+2021-03-22 21:23:03,038 - Model - INFO - eval point accuracy: 0.822901
+2021-03-22 21:23:03,038 - Model - INFO - eval point avg class acc: 0.610188
+2021-03-22 21:23:03,038 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.898
+class floor weight: 0.200, IoU: 0.971
+class wall weight: 0.167, IoU: 0.733
+class beam weight: 0.279, IoU: 0.000
+class column weight: 0.000, IoU: 0.088
+class window weight: 0.020, IoU: 0.595
+class door weight: 0.034, IoU: 0.196
+class table weight: 0.029, IoU: 0.666
+class chair weight: 0.040, IoU: 0.724
+class sofa weight: 0.018, IoU: 0.304
+class bookcase weight: 0.003, IoU: 0.588
+class board weight: 0.107, IoU: 0.514
+class clutter weight: 0.011, IoU: 0.419
+
+2021-03-22 21:23:03,039 - Model - INFO - Eval mean loss: 0.945076
+2021-03-22 21:23:03,039 - Model - INFO - Eval accuracy: 0.822901
+2021-03-22 21:23:03,039 - Model - INFO - Best mIoU: 0.526403
+2021-03-22 21:23:03,039 - Model - INFO - **** Epoch 20 (23/128) ****
+2021-03-22 21:23:03,039 - Model - INFO - Learning rate:0.000490
+2021-03-22 22:05:53,624 - Model - INFO - Training mean loss: 0.186940
+2021-03-22 22:05:53,624 - Model - INFO - Training accuracy: 0.935835
+2021-03-22 22:05:54,547 - Model - INFO - ---- EPOCH 020 EVALUATION ----
+2021-03-22 22:22:24,103 - Model - INFO - eval mean loss: 0.923625
+2021-03-22 22:22:24,104 - Model - INFO - eval point avg class IoU: 0.514263
+2021-03-22 22:22:24,104 - Model - INFO - eval point accuracy: 0.829205
+2021-03-22 22:22:24,104 - Model - INFO - eval point avg class acc: 0.608236
+2021-03-22 22:22:24,104 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.910
+class floor weight: 0.202, IoU: 0.969
+class wall weight: 0.167, IoU: 0.747
+class beam weight: 0.279, IoU: 0.000
+class column weight: 0.000, IoU: 0.065
+class window weight: 0.018, IoU: 0.566
+class door weight: 0.032, IoU: 0.074
+class table weight: 0.030, IoU: 0.680
+class chair weight: 0.039, IoU: 0.744
+class sofa weight: 0.019, IoU: 0.378
+class bookcase weight: 0.003, IoU: 0.616
+class board weight: 0.109, IoU: 0.508
+class clutter weight: 0.012, IoU: 0.429
+
+2021-03-22 22:22:24,105 - Model - INFO - Eval mean loss: 0.923625
+2021-03-22 22:22:24,105 - Model - INFO - Eval accuracy: 0.829205
+2021-03-22 22:22:24,105 - Model - INFO - Best mIoU: 0.526403
+2021-03-22 22:22:24,105 - Model - INFO - **** Epoch 21 (24/128) ****
+2021-03-22 22:22:24,105 - Model - INFO - Learning rate:0.000490
+2021-03-22 23:06:32,378 - Model - INFO - Training mean loss: 0.186617
+2021-03-22 23:06:32,382 - Model - INFO - Training accuracy: 0.935805
+2021-03-22 23:06:33,097 - Model - INFO - ---- EPOCH 021 EVALUATION ----
+2021-03-23 00:41:58,230 - Model - INFO - eval mean loss: 0.845539
+2021-03-23 00:41:58,231 - Model - INFO - eval point avg class IoU: 0.520694
+2021-03-23 00:41:58,231 - Model - INFO - eval point accuracy: 0.828976
+2021-03-23 00:41:58,231 - Model - INFO - eval point avg class acc: 0.626974
+2021-03-23 00:41:58,232 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.903
+class floor weight: 0.203, IoU: 0.978
+class wall weight: 0.168, IoU: 0.733
+class beam weight: 0.276, IoU: 0.000
+class column weight: 0.000, IoU: 0.073
+class window weight: 0.019, IoU: 0.581
+class door weight: 0.034, IoU: 0.191
+class table weight: 0.029, IoU: 0.686
+class chair weight: 0.039, IoU: 0.679
+class sofa weight: 0.019, IoU: 0.370
+class bookcase weight: 0.003, IoU: 0.625
+class board weight: 0.108, IoU: 0.522
+class clutter weight: 0.011, IoU: 0.427
+
+2021-03-23 00:41:58,235 - Model - INFO - Eval mean loss: 0.845539
+2021-03-23 00:41:58,235 - Model - INFO - Eval accuracy: 0.828976
+2021-03-23 00:41:58,235 - Model - INFO - Best mIoU: 0.526403
+2021-03-23 00:41:58,235 - Model - INFO - **** Epoch 22 (25/128) ****
+2021-03-23 00:41:58,235 - Model - INFO - Learning rate:0.000490
+2021-03-23 01:48:11,557 - Model - INFO - Training mean loss: 0.184340
+2021-03-23 01:48:11,557 - Model - INFO - Training accuracy: 0.936617
+2021-03-23 01:48:12,329 - Model - INFO - ---- EPOCH 022 EVALUATION ----
+2021-03-23 02:03:44,155 - Model - INFO - eval mean loss: 0.915835
+2021-03-23 02:03:44,156 - Model - INFO - eval point avg class IoU: 0.514736
+2021-03-23 02:03:44,156 - Model - INFO - eval point accuracy: 0.825243
+2021-03-23 02:03:44,156 - Model - INFO - eval point avg class acc: 0.602019
+2021-03-23 02:03:44,157 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.093, IoU: 0.894
+class floor weight: 0.201, IoU: 0.977
+class wall weight: 0.168, IoU: 0.731
+class beam weight: 0.277, IoU: 0.000
+class column weight: 0.000, IoU: 0.042
+class window weight: 0.017, IoU: 0.517
+class door weight: 0.033, IoU: 0.175
+class table weight: 0.030, IoU: 0.675
+class chair weight: 0.038, IoU: 0.738
+class sofa weight: 0.018, IoU: 0.378
+class bookcase weight: 0.003, IoU: 0.613
+class board weight: 0.110, IoU: 0.521
+class clutter weight: 0.012, IoU: 0.431
+
+2021-03-23 02:03:44,157 - Model - INFO - Eval mean loss: 0.915835
+2021-03-23 02:03:44,157 - Model - INFO - Eval accuracy: 0.825243
+2021-03-23 02:03:44,157 - Model - INFO - Best mIoU: 0.526403
+2021-03-23 02:03:44,157 - Model - INFO - **** Epoch 23 (26/128) ****
+2021-03-23 02:03:44,158 - Model - INFO - Learning rate:0.000490
+2021-03-23 02:44:56,484 - Model - INFO - Training mean loss: 0.181658
+2021-03-23 02:44:56,485 - Model - INFO - Training accuracy: 0.937326
+2021-03-23 02:44:56,485 - Model - INFO - Save model...
+2021-03-23 02:44:56,485 - Model - INFO - Saving at log/sem_seg/pointnet2_sem_seg/checkpoints/model.pth
+2021-03-23 02:44:56,553 - Model - INFO - Saving model....
+2021-03-23 02:44:57,330 - Model - INFO - ---- EPOCH 023 EVALUATION ----
+2021-03-23 03:00:31,244 - Model - INFO - eval mean loss: 1.004251
+2021-03-23 03:00:31,244 - Model - INFO - eval point avg class IoU: 0.506158
+2021-03-23 03:00:31,244 - Model - INFO - eval point accuracy: 0.822229
+2021-03-23 03:00:31,245 - Model - INFO - eval point avg class acc: 0.602190
+2021-03-23 03:00:31,245 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.901
+class floor weight: 0.200, IoU: 0.977
+class wall weight: 0.167, IoU: 0.739
+class beam weight: 0.277, IoU: 0.000
+class column weight: 0.000, IoU: 0.040
+class window weight: 0.020, IoU: 0.558
+class door weight: 0.034, IoU: 0.115
+class table weight: 0.029, IoU: 0.654
+class chair weight: 0.039, IoU: 0.728
+class sofa weight: 0.019, IoU: 0.360
+class bookcase weight: 0.003, IoU: 0.601
+class board weight: 0.110, IoU: 0.510
+class clutter weight: 0.011, IoU: 0.398
+
+2021-03-23 03:00:31,245 - Model - INFO - Eval mean loss: 1.004251
+2021-03-23 03:00:31,245 - Model - INFO - Eval accuracy: 0.822229
+2021-03-23 03:00:31,245 - Model - INFO - Best mIoU: 0.526403
+2021-03-23 03:00:31,245 - Model - INFO - **** Epoch 24 (27/128) ****
+2021-03-23 03:00:31,245 - Model - INFO - Learning rate:0.000490
+2021-03-23 03:40:27,797 - Model - INFO - Training mean loss: 0.184528
+2021-03-23 03:40:27,797 - Model - INFO - Training accuracy: 0.936480
+2021-03-23 03:40:28,442 - Model - INFO - ---- EPOCH 024 EVALUATION ----
+2021-03-23 03:56:00,333 - Model - INFO - eval mean loss: 0.902394
+2021-03-23 03:56:00,333 - Model - INFO - eval point avg class IoU: 0.515602
+2021-03-23 03:56:00,334 - Model - INFO - eval point accuracy: 0.823924
+2021-03-23 03:56:00,334 - Model - INFO - eval point avg class acc: 0.613083
+2021-03-23 03:56:00,334 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.896
+class floor weight: 0.203, IoU: 0.978
+class wall weight: 0.170, IoU: 0.730
+class beam weight: 0.276, IoU: 0.000
+class column weight: 0.000, IoU: 0.118
+class window weight: 0.017, IoU: 0.595
+class door weight: 0.033, IoU: 0.148
+class table weight: 0.030, IoU: 0.660
+class chair weight: 0.039, IoU: 0.733
+class sofa weight: 0.019, IoU: 0.283
+class bookcase weight: 0.003, IoU: 0.594
+class board weight: 0.108, IoU: 0.556
+class clutter weight: 0.010, IoU: 0.412
+
+2021-03-23 03:56:00,335 - Model - INFO - Eval mean loss: 0.902394
+2021-03-23 03:56:00,335 - Model - INFO - Eval accuracy: 0.823924
+2021-03-23 03:56:00,335 - Model - INFO - Best mIoU: 0.526403
+2021-03-23 03:56:00,336 - Model - INFO - **** Epoch 25 (28/128) ****
+2021-03-23 03:56:00,336 - Model - INFO - Learning rate:0.000490
+2021-03-23 04:36:48,991 - Model - INFO - Training mean loss: 0.181103
+2021-03-23 04:36:48,991 - Model - INFO - Training accuracy: 0.937600
+2021-03-23 04:36:49,631 - Model - INFO - ---- EPOCH 025 EVALUATION ----
+2021-03-23 04:51:16,125 - Model - INFO - eval mean loss: 0.923668
+2021-03-23 04:51:16,126 - Model - INFO - eval point avg class IoU: 0.512343
+2021-03-23 04:51:16,126 - Model - INFO - eval point accuracy: 0.818776
+2021-03-23 04:51:16,126 - Model - INFO - eval point avg class acc: 0.615073
+2021-03-23 04:51:16,126 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.894
+class floor weight: 0.201, IoU: 0.971
+class wall weight: 0.168, IoU: 0.728
+class beam weight: 0.279, IoU: 0.000
+class column weight: 0.000, IoU: 0.099
+class window weight: 0.017, IoU: 0.616
+class door weight: 0.034, IoU: 0.091
+class table weight: 0.030, IoU: 0.688
+class chair weight: 0.039, IoU: 0.729
+class sofa weight: 0.019, IoU: 0.362
+class bookcase weight: 0.003, IoU: 0.567
+class board weight: 0.108, IoU: 0.503
+class clutter weight: 0.011, IoU: 0.412
+
+2021-03-23 04:51:16,126 - Model - INFO - Eval mean loss: 0.923668
+2021-03-23 04:51:16,127 - Model - INFO - Eval accuracy: 0.818776
+2021-03-23 04:51:16,127 - Model - INFO - Best mIoU: 0.526403
+2021-03-23 04:51:16,127 - Model - INFO - **** Epoch 26 (29/128) ****
+2021-03-23 04:51:16,127 - Model - INFO - Learning rate:0.000490
+2021-03-23 05:29:21,137 - Model - INFO - Training mean loss: 0.180734
+2021-03-23 05:29:21,138 - Model - INFO - Training accuracy: 0.937785
+2021-03-23 05:29:21,710 - Model - INFO - ---- EPOCH 026 EVALUATION ----
+2021-03-23 05:43:44,769 - Model - INFO - eval mean loss: 0.968068
+2021-03-23 05:43:44,769 - Model - INFO - eval point avg class IoU: 0.503748
+2021-03-23 05:43:44,769 - Model - INFO - eval point accuracy: 0.824537
+2021-03-23 05:43:44,769 - Model - INFO - eval point avg class acc: 0.606287
+2021-03-23 05:43:44,769 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.897
+class floor weight: 0.202, IoU: 0.974
+class wall weight: 0.167, IoU: 0.712
+class beam weight: 0.275, IoU: 0.000
+class column weight: 0.000, IoU: 0.036
+class window weight: 0.019, IoU: 0.571
+class door weight: 0.034, IoU: 0.095
+class table weight: 0.031, IoU: 0.674
+class chair weight: 0.040, IoU: 0.730
+class sofa weight: 0.019, IoU: 0.296
+class bookcase weight: 0.002, IoU: 0.636
+class board weight: 0.108, IoU: 0.512
+class clutter weight: 0.011, IoU: 0.415
+
+2021-03-23 05:43:44,770 - Model - INFO - Eval mean loss: 0.968068
+2021-03-23 05:43:44,770 - Model - INFO - Eval accuracy: 0.824537
+2021-03-23 05:43:44,770 - Model - INFO - Best mIoU: 0.526403
+2021-03-23 05:43:44,770 - Model - INFO - **** Epoch 27 (30/128) ****
+2021-03-23 05:43:44,770 - Model - INFO - Learning rate:0.000490
+2021-03-23 06:21:55,298 - Model - INFO - Training mean loss: 0.180800
+2021-03-23 06:21:55,298 - Model - INFO - Training accuracy: 0.937759
+2021-03-23 06:21:55,881 - Model - INFO - ---- EPOCH 027 EVALUATION ----
+2021-03-23 06:36:13,322 - Model - INFO - eval mean loss: 0.972755
+2021-03-23 06:36:13,322 - Model - INFO - eval point avg class IoU: 0.495973
+2021-03-23 06:36:13,322 - Model - INFO - eval point accuracy: 0.822103
+2021-03-23 06:36:13,322 - Model - INFO - eval point avg class acc: 0.589392
+2021-03-23 06:36:13,323 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.899
+class floor weight: 0.202, IoU: 0.960
+class wall weight: 0.168, IoU: 0.745
+class beam weight: 0.277, IoU: 0.000
+class column weight: 0.000, IoU: 0.034
+class window weight: 0.017, IoU: 0.521
+class door weight: 0.032, IoU: 0.135
+class table weight: 0.030, IoU: 0.664
+class chair weight: 0.040, IoU: 0.701
+class sofa weight: 0.019, IoU: 0.291
+class bookcase weight: 0.002, IoU: 0.607
+class board weight: 0.110, IoU: 0.484
+class clutter weight: 0.011, IoU: 0.407
+
+2021-03-23 06:36:13,323 - Model - INFO - Eval mean loss: 0.972755
+2021-03-23 06:36:13,323 - Model - INFO - Eval accuracy: 0.822103
+2021-03-23 06:36:13,323 - Model - INFO - Best mIoU: 0.526403
+2021-03-23 06:36:13,323 - Model - INFO - **** Epoch 28 (31/128) ****
+2021-03-23 06:36:13,323 - Model - INFO - Learning rate:0.000343
+2021-03-23 07:14:45,633 - Model - INFO - Training mean loss: 0.159198
+2021-03-23 07:14:45,634 - Model - INFO - Training accuracy: 0.944097
+2021-03-23 07:14:45,634 - Model - INFO - Save model...
+2021-03-23 07:14:45,634 - Model - INFO - Saving at log/sem_seg/pointnet2_sem_seg/checkpoints/model.pth
+2021-03-23 07:14:45,994 - Model - INFO - Saving model....
+2021-03-23 07:14:46,578 - Model - INFO - ---- EPOCH 028 EVALUATION ----
+2021-03-23 07:29:19,564 - Model - INFO - eval mean loss: 1.025074
+2021-03-23 07:29:19,564 - Model - INFO - eval point avg class IoU: 0.511310
+2021-03-23 07:29:19,565 - Model - INFO - eval point accuracy: 0.825304
+2021-03-23 07:29:19,565 - Model - INFO - eval point avg class acc: 0.605690
+2021-03-23 07:29:19,565 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.884
+class floor weight: 0.200, IoU: 0.973
+class wall weight: 0.167, IoU: 0.737
+class beam weight: 0.278, IoU: 0.000
+class column weight: 0.000, IoU: 0.047
+class window weight: 0.019, IoU: 0.577
+class door weight: 0.033, IoU: 0.098
+class table weight: 0.030, IoU: 0.686
+class chair weight: 0.039, IoU: 0.714
+class sofa weight: 0.019, IoU: 0.429
+class bookcase weight: 0.003, IoU: 0.619
+class board weight: 0.109, IoU: 0.442
+class clutter weight: 0.012, IoU: 0.441
+
+2021-03-23 07:29:19,565 - Model - INFO - Eval mean loss: 1.025074
+2021-03-23 07:29:19,566 - Model - INFO - Eval accuracy: 0.825304
+2021-03-23 07:29:19,566 - Model - INFO - Best mIoU: 0.526403
+2021-03-23 07:29:19,566 - Model - INFO - **** Epoch 29 (32/128) ****
+2021-03-23 07:29:19,566 - Model - INFO - Learning rate:0.000343
+2021-03-23 08:07:32,957 - Model - INFO - Training mean loss: 0.160262
+2021-03-23 08:07:32,957 - Model - INFO - Training accuracy: 0.943529
+2021-03-23 08:07:33,493 - Model - INFO - ---- EPOCH 029 EVALUATION ----
+2021-03-23 08:22:14,658 - Model - INFO - eval mean loss: 1.098187
+2021-03-23 08:22:14,658 - Model - INFO - eval point avg class IoU: 0.508877
+2021-03-23 08:22:14,658 - Model - INFO - eval point accuracy: 0.817269
+2021-03-23 08:22:14,659 - Model - INFO - eval point avg class acc: 0.594025
+2021-03-23 08:22:14,659 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.894
+class floor weight: 0.202, IoU: 0.974
+class wall weight: 0.170, IoU: 0.723
+class beam weight: 0.275, IoU: 0.000
+class column weight: 0.000, IoU: 0.053
+class window weight: 0.018, IoU: 0.523
+class door weight: 0.033, IoU: 0.145
+class table weight: 0.031, IoU: 0.656
+class chair weight: 0.038, IoU: 0.756
+class sofa weight: 0.019, IoU: 0.392
+class bookcase weight: 0.003, IoU: 0.590
+class board weight: 0.107, IoU: 0.500
+class clutter weight: 0.011, IoU: 0.410
+
+2021-03-23 08:22:14,659 - Model - INFO - Eval mean loss: 1.098187
+2021-03-23 08:22:14,659 - Model - INFO - Eval accuracy: 0.817269
+2021-03-23 08:22:14,659 - Model - INFO - Best mIoU: 0.526403
+2021-03-23 08:22:14,660 - Model - INFO - **** Epoch 30 (33/128) ****
+2021-03-23 08:22:14,660 - Model - INFO - Learning rate:0.000343
+2021-03-23 09:00:26,018 - Model - INFO - Training mean loss: 0.159033
+2021-03-23 09:00:26,019 - Model - INFO - Training accuracy: 0.944311
+2021-03-23 09:00:26,697 - Model - INFO - ---- EPOCH 030 EVALUATION ----
+2021-03-23 09:14:51,067 - Model - INFO - eval mean loss: 1.037581
+2021-03-23 09:14:51,067 - Model - INFO - eval point avg class IoU: 0.509508
+2021-03-23 09:14:51,068 - Model - INFO - eval point accuracy: 0.822174
+2021-03-23 09:14:51,068 - Model - INFO - eval point avg class acc: 0.606001
+2021-03-23 09:14:51,068 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.093, IoU: 0.896
+class floor weight: 0.202, IoU: 0.975
+class wall weight: 0.168, IoU: 0.739
+class beam weight: 0.276, IoU: 0.000
+class column weight: 0.000, IoU: 0.054
+class window weight: 0.019, IoU: 0.562
+class door weight: 0.033, IoU: 0.108
+class table weight: 0.030, IoU: 0.657
+class chair weight: 0.039, IoU: 0.728
+class sofa weight: 0.019, IoU: 0.400
+class bookcase weight: 0.003, IoU: 0.607
+class board weight: 0.107, IoU: 0.476
+class clutter weight: 0.012, IoU: 0.421
+
+2021-03-23 09:14:51,068 - Model - INFO - Eval mean loss: 1.037581
+2021-03-23 09:14:51,068 - Model - INFO - Eval accuracy: 0.822174
+2021-03-23 09:14:51,068 - Model - INFO - Best mIoU: 0.526403
+2021-03-23 09:14:51,068 - Model - INFO - **** Epoch 31 (34/128) ****
+2021-03-23 09:14:51,068 - Model - INFO - Learning rate:0.000343
+2021-03-23 09:53:41,763 - Model - INFO - Training mean loss: 0.156626
+2021-03-23 09:53:41,764 - Model - INFO - Training accuracy: 0.944878
+2021-03-23 09:53:42,298 - Model - INFO - ---- EPOCH 031 EVALUATION ----
+2021-03-23 10:08:15,149 - Model - INFO - eval mean loss: 1.036335
+2021-03-23 10:08:15,149 - Model - INFO - eval point avg class IoU: 0.514305
+2021-03-23 10:08:15,149 - Model - INFO - eval point accuracy: 0.811708
+2021-03-23 10:08:15,149 - Model - INFO - eval point avg class acc: 0.614819
+2021-03-23 10:08:15,149 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.901
+class floor weight: 0.200, IoU: 0.967
+class wall weight: 0.167, IoU: 0.736
+class beam weight: 0.279, IoU: 0.000
+class column weight: 0.000, IoU: 0.075
+class window weight: 0.019, IoU: 0.541
+class door weight: 0.034, IoU: 0.155
+class table weight: 0.030, IoU: 0.655
+class chair weight: 0.038, IoU: 0.741
+class sofa weight: 0.019, IoU: 0.443
+class bookcase weight: 0.003, IoU: 0.559
+class board weight: 0.108, IoU: 0.531
+class clutter weight: 0.011, IoU: 0.381
+
+2021-03-23 10:08:15,150 - Model - INFO - Eval mean loss: 1.036335
+2021-03-23 10:08:15,150 - Model - INFO - Eval accuracy: 0.811708
+2021-03-23 10:08:15,150 - Model - INFO - Best mIoU: 0.526403
+2021-03-23 10:08:15,150 - Model - INFO - **** Epoch 32 (35/128) ****
+2021-03-23 10:08:15,150 - Model - INFO - Learning rate:0.000343
+2021-03-23 10:46:34,529 - Model - INFO - Training mean loss: 0.155862
+2021-03-23 10:46:34,530 - Model - INFO - Training accuracy: 0.945399
+2021-03-23 10:46:35,215 - Model - INFO - ---- EPOCH 032 EVALUATION ----
+2021-03-23 12:47:10,782 - Model - INFO - eval mean loss: 0.939576
+2021-03-23 12:47:10,783 - Model - INFO - eval point avg class IoU: 0.508077
+2021-03-23 12:47:10,783 - Model - INFO - eval point accuracy: 0.823115
+2021-03-23 12:47:10,783 - Model - INFO - eval point avg class acc: 0.631164
+2021-03-23 12:47:10,783 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.900
+class floor weight: 0.200, IoU: 0.968
+class wall weight: 0.168, IoU: 0.736
+class beam weight: 0.278, IoU: 0.000
+class column weight: 0.000, IoU: 0.055
+class window weight: 0.019, IoU: 0.535
+class door weight: 0.034, IoU: 0.175
+class table weight: 0.030, IoU: 0.683
+class chair weight: 0.039, IoU: 0.713
+class sofa weight: 0.019, IoU: 0.215
+class bookcase weight: 0.003, IoU: 0.628
+class board weight: 0.108, IoU: 0.590
+class clutter weight: 0.011, IoU: 0.409
+
+2021-03-23 12:47:10,783 - Model - INFO - Eval mean loss: 0.939576
+2021-03-23 12:47:10,783 - Model - INFO - Eval accuracy: 0.823115
+2021-03-23 12:47:10,783 - Model - INFO - Best mIoU: 0.526403
+2021-03-23 12:47:10,783 - Model - INFO - **** Epoch 33 (36/128) ****
+2021-03-23 12:47:10,783 - Model - INFO - Learning rate:0.000343
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet2_sem_seg/pointnet2_sem_seg.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet2_sem_seg/pointnet2_sem_seg.py
new file mode 100644
index 0000000000..e9b7332ae0
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet2_sem_seg/pointnet2_sem_seg.py
@@ -0,0 +1,55 @@
+import torch.nn as nn
+import torch.nn.functional as F
+from models.pointnet2_utils import PointNetSetAbstraction,PointNetFeaturePropagation
+
+
+class get_model(nn.Module):
+ def __init__(self, num_classes):
+ super(get_model, self).__init__()
+ self.sa1 = PointNetSetAbstraction(1024, 0.1, 32, 9 + 3, [32, 32, 64], False)
+ self.sa2 = PointNetSetAbstraction(256, 0.2, 32, 64 + 3, [64, 64, 128], False)
+ self.sa3 = PointNetSetAbstraction(64, 0.4, 32, 128 + 3, [128, 128, 256], False)
+ self.sa4 = PointNetSetAbstraction(16, 0.8, 32, 256 + 3, [256, 256, 512], False)
+ self.fp4 = PointNetFeaturePropagation(768, [256, 256])
+ self.fp3 = PointNetFeaturePropagation(384, [256, 256])
+ self.fp2 = PointNetFeaturePropagation(320, [256, 128])
+ self.fp1 = PointNetFeaturePropagation(128, [128, 128, 128])
+ self.conv1 = nn.Conv1d(128, 128, 1)
+ self.bn1 = nn.BatchNorm1d(128)
+ self.drop1 = nn.Dropout(0.5)
+ self.conv2 = nn.Conv1d(128, num_classes, 1)
+
+ def forward(self, xyz):
+ l0_points = xyz
+ l0_xyz = xyz[:,:3,:]
+
+ l1_xyz, l1_points = self.sa1(l0_xyz, l0_points)
+ l2_xyz, l2_points = self.sa2(l1_xyz, l1_points)
+ l3_xyz, l3_points = self.sa3(l2_xyz, l2_points)
+ l4_xyz, l4_points = self.sa4(l3_xyz, l3_points)
+
+ l3_points = self.fp4(l3_xyz, l4_xyz, l3_points, l4_points)
+ l2_points = self.fp3(l2_xyz, l3_xyz, l2_points, l3_points)
+ l1_points = self.fp2(l1_xyz, l2_xyz, l1_points, l2_points)
+ l0_points = self.fp1(l0_xyz, l1_xyz, None, l1_points)
+
+ x = self.drop1(F.relu(self.bn1(self.conv1(l0_points)), inplace=True))
+ x = self.conv2(x)
+ x = F.log_softmax(x, dim=1)
+ x = x.permute(0, 2, 1)
+ return x, l4_points
+
+
+class get_loss(nn.Module):
+ def __init__(self):
+ super(get_loss, self).__init__()
+ def forward(self, pred, target, trans_feat, weight):
+ total_loss = F.nll_loss(pred, target, weight=weight)
+
+ return total_loss
+
+if __name__ == '__main__':
+ import torch
+ model = get_model(13)
+ xyz = torch.rand(6, 9, 2048)
+ (model(xyz))
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet2_sem_seg/pointnet2_utils.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet2_sem_seg/pointnet2_utils.py
new file mode 100644
index 0000000000..4ef8763727
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet2_sem_seg/pointnet2_utils.py
@@ -0,0 +1,316 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from time import time
+import numpy as np
+
+def timeit(tag, t):
+ print("{}: {}s".format(tag, time() - t))
+ return time()
+
+def pc_normalize(pc):
+ l = pc.shape[0]
+ centroid = np.mean(pc, axis=0)
+ pc = pc - centroid
+ m = np.max(np.sqrt(np.sum(pc**2, axis=1)))
+ pc = pc / m
+ return pc
+
+def square_distance(src, dst):
+ """
+ Calculate Euclid distance between each two points.
+
+ src^T * dst = xn * xm + yn * ym + zn * zmï¼›
+ sum(src^2, dim=-1) = xn*xn + yn*yn + zn*zn;
+ sum(dst^2, dim=-1) = xm*xm + ym*ym + zm*zm;
+ dist = (xn - xm)^2 + (yn - ym)^2 + (zn - zm)^2
+ = sum(src**2,dim=-1)+sum(dst**2,dim=-1)-2*src^T*dst
+
+ Input:
+ src: source points, [B, N, C]
+ dst: target points, [B, M, C]
+ Output:
+ dist: per-point square distance, [B, N, M]
+ """
+ B, N, _ = src.shape
+ _, M, _ = dst.shape
+ dist = -2 * torch.matmul(src, dst.permute(0, 2, 1))
+ dist += torch.sum(src ** 2, -1).view(B, N, 1)
+ dist += torch.sum(dst ** 2, -1).view(B, 1, M)
+ return dist
+
+
+def index_points(points, idx):
+ """
+
+ Input:
+ points: input points data, [B, N, C]
+ idx: sample index data, [B, S]
+ Return:
+ new_points:, indexed points data, [B, S, C]
+ """
+ device = points.device
+ B = points.shape[0]
+ view_shape = list(idx.shape)
+ view_shape[1:] = [1] * (len(view_shape) - 1)
+ repeat_shape = list(idx.shape)
+ repeat_shape[0] = 1
+ batch_indices = torch.arange(B, dtype=torch.long).to(device).view(view_shape).repeat(repeat_shape)
+ new_points = points[batch_indices, idx, :]
+ return new_points
+
+
+def farthest_point_sample(xyz, npoint):
+ """
+ Input:
+ xyz: pointcloud data, [B, N, 3]
+ npoint: number of samples
+ Return:
+ centroids: sampled pointcloud index, [B, npoint]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ centroids = torch.zeros(B, npoint, dtype=torch.long).to(device)
+ distance = torch.ones(B, N).to(device) * 1e10
+ farthest = torch.randint(0, N, (B,), dtype=torch.long).to(device)
+ batch_indices = torch.arange(B, dtype=torch.long).to(device)
+ for i in range(npoint):
+ centroids[:, i] = farthest
+ centroid = xyz[batch_indices, farthest, :].view(B, 1, 3)
+ dist = torch.sum((xyz - centroid) ** 2, -1)
+ mask = dist < distance
+ distance[mask] = dist[mask]
+ farthest = torch.max(distance, -1)[1]
+ return centroids
+
+
+def query_ball_point(radius, nsample, xyz, new_xyz):
+ """
+ Input:
+ radius: local region radius
+ nsample: max sample number in local region
+ xyz: all points, [B, N, 3]
+ new_xyz: query points, [B, S, 3]
+ Return:
+ group_idx: grouped points index, [B, S, nsample]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ _, S, _ = new_xyz.shape
+ group_idx = torch.arange(N, dtype=torch.long).to(device).view(1, 1, N).repeat([B, S, 1])
+ sqrdists = square_distance(new_xyz, xyz)
+ group_idx[sqrdists > radius ** 2] = N
+ group_idx = group_idx.sort(dim=-1)[0][:, :, :nsample]
+ group_first = group_idx[:, :, 0].view(B, S, 1).repeat([1, 1, nsample])
+ mask = group_idx == N
+ group_idx[mask] = group_first[mask]
+ return group_idx
+
+
+def sample_and_group(npoint, radius, nsample, xyz, points, returnfps=False):
+ """
+ Input:
+ npoint:
+ radius:
+ nsample:
+ xyz: input points position data, [B, N, 3]
+ points: input points data, [B, N, D]
+ Return:
+ new_xyz: sampled points position data, [B, npoint, nsample, 3]
+ new_points: sampled points data, [B, npoint, nsample, 3+D]
+ """
+ B, N, C = xyz.shape
+ S = npoint
+ fps_idx = farthest_point_sample(xyz, npoint) # [B, npoint, C]
+ new_xyz = index_points(xyz, fps_idx)
+ idx = query_ball_point(radius, nsample, xyz, new_xyz)
+ grouped_xyz = index_points(xyz, idx) # [B, npoint, nsample, C]
+ grouped_xyz_norm = grouped_xyz - new_xyz.view(B, S, 1, C)
+
+ if points is not None:
+ grouped_points = index_points(points, idx)
+ new_points = torch.cat([grouped_xyz_norm, grouped_points], dim=-1) # [B, npoint, nsample, C+D]
+ else:
+ new_points = grouped_xyz_norm
+ if returnfps:
+ return new_xyz, new_points, grouped_xyz, fps_idx
+ else:
+ return new_xyz, new_points
+
+
+def sample_and_group_all(xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, N, 3]
+ points: input points data, [B, N, D]
+ Return:
+ new_xyz: sampled points position data, [B, 1, 3]
+ new_points: sampled points data, [B, 1, N, 3+D]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ new_xyz = torch.zeros(B, 1, C).to(device)
+ grouped_xyz = xyz.view(B, 1, N, C)
+ if points is not None:
+ new_points = torch.cat([grouped_xyz, points.view(B, 1, N, -1)], dim=-1)
+ else:
+ new_points = grouped_xyz
+ return new_xyz, new_points
+
+
+class PointNetSetAbstraction(nn.Module):
+ def __init__(self, npoint, radius, nsample, in_channel, mlp, group_all):
+ super(PointNetSetAbstraction, self).__init__()
+ self.npoint = npoint
+ self.radius = radius
+ self.nsample = nsample
+ self.mlp_convs = nn.ModuleList()
+ self.mlp_bns = nn.ModuleList()
+ last_channel = in_channel
+ for out_channel in mlp:
+ self.mlp_convs.append(nn.Conv2d(last_channel, out_channel, 1))
+ self.mlp_bns.append(nn.BatchNorm2d(out_channel))
+ last_channel = out_channel
+ self.group_all = group_all
+
+ def forward(self, xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, C, N]
+ points: input points data, [B, D, N]
+ Return:
+ new_xyz: sampled points position data, [B, C, S]
+ new_points_concat: sample points feature data, [B, D', S]
+ """
+ xyz = xyz.permute(0, 2, 1)
+ if points is not None:
+ points = points.permute(0, 2, 1)
+
+ if self.group_all:
+ new_xyz, new_points = sample_and_group_all(xyz, points)
+ else:
+ new_xyz, new_points = sample_and_group(self.npoint, self.radius, self.nsample, xyz, points)
+ # new_xyz: sampled points position data, [B, npoint, C]
+ # new_points: sampled points data, [B, npoint, nsample, C+D]
+ new_points = new_points.permute(0, 3, 2, 1) # [B, C+D, nsample,npoint]
+ for i, conv in enumerate(self.mlp_convs):
+ bn = self.mlp_bns[i]
+ new_points = F.relu(bn(conv(new_points)), inplace=True)
+
+ new_points = torch.max(new_points, 2)[0]
+ new_xyz = new_xyz.permute(0, 2, 1)
+ return new_xyz, new_points
+
+
+class PointNetSetAbstractionMsg(nn.Module):
+ def __init__(self, npoint, radius_list, nsample_list, in_channel, mlp_list):
+ super(PointNetSetAbstractionMsg, self).__init__()
+ self.npoint = npoint
+ self.radius_list = radius_list
+ self.nsample_list = nsample_list
+ self.conv_blocks = nn.ModuleList()
+ self.bn_blocks = nn.ModuleList()
+ for i in range(len(mlp_list)):
+ convs = nn.ModuleList()
+ bns = nn.ModuleList()
+ last_channel = in_channel + 3
+ for out_channel in mlp_list[i]:
+ convs.append(nn.Conv2d(last_channel, out_channel, 1))
+ bns.append(nn.BatchNorm2d(out_channel))
+ last_channel = out_channel
+ self.conv_blocks.append(convs)
+ self.bn_blocks.append(bns)
+
+ def forward(self, xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, C, N]
+ points: input points data, [B, D, N]
+ Return:
+ new_xyz: sampled points position data, [B, C, S]
+ new_points_concat: sample points feature data, [B, D', S]
+ """
+ xyz = xyz.permute(0, 2, 1)
+ if points is not None:
+ points = points.permute(0, 2, 1)
+
+ B, N, C = xyz.shape
+ S = self.npoint
+ new_xyz = index_points(xyz, farthest_point_sample(xyz, S))
+ new_points_list = []
+ for i, radius in enumerate(self.radius_list):
+ K = self.nsample_list[i]
+ group_idx = query_ball_point(radius, K, xyz, new_xyz)
+ grouped_xyz = index_points(xyz, group_idx)
+ grouped_xyz -= new_xyz.view(B, S, 1, C)
+ if points is not None:
+ grouped_points = index_points(points, group_idx)
+ grouped_points = torch.cat([grouped_points, grouped_xyz], dim=-1)
+ else:
+ grouped_points = grouped_xyz
+
+ grouped_points = grouped_points.permute(0, 3, 2, 1) # [B, D, K, S]
+ for j in range(len(self.conv_blocks[i])):
+ conv = self.conv_blocks[i][j]
+ bn = self.bn_blocks[i][j]
+ grouped_points = F.relu(bn(conv(grouped_points)), inplace=True)
+ new_points = torch.max(grouped_points, 2)[0] # [B, D', S]
+ new_points_list.append(new_points)
+
+ new_xyz = new_xyz.permute(0, 2, 1)
+ new_points_concat = torch.cat(new_points_list, dim=1)
+ return new_xyz, new_points_concat
+
+
+class PointNetFeaturePropagation(nn.Module):
+ def __init__(self, in_channel, mlp):
+ super(PointNetFeaturePropagation, self).__init__()
+ self.mlp_convs = nn.ModuleList()
+ self.mlp_bns = nn.ModuleList()
+ last_channel = in_channel
+ for out_channel in mlp:
+ self.mlp_convs.append(nn.Conv1d(last_channel, out_channel, 1))
+ self.mlp_bns.append(nn.BatchNorm1d(out_channel))
+ last_channel = out_channel
+
+ def forward(self, xyz1, xyz2, points1, points2):
+ """
+ Input:
+ xyz1: input points position data, [B, C, N]
+ xyz2: sampled input points position data, [B, C, S]
+ points1: input points data, [B, D, N]
+ points2: input points data, [B, D, S]
+ Return:
+ new_points: upsampled points data, [B, D', N]
+ """
+ xyz1 = xyz1.permute(0, 2, 1)
+ xyz2 = xyz2.permute(0, 2, 1)
+
+ points2 = points2.permute(0, 2, 1)
+ B, N, C = xyz1.shape
+ _, S, _ = xyz2.shape
+
+ if S == 1:
+ interpolated_points = points2.repeat(1, N, 1)
+ else:
+ dists = square_distance(xyz1, xyz2)
+ dists, idx = dists.sort(dim=-1)
+ dists, idx = dists[:, :, :3], idx[:, :, :3] # [B, N, 3]
+
+ dist_recip = 1.0 / (dists + 1e-8)
+ norm = torch.sum(dist_recip, dim=2, keepdim=True)
+ weight = dist_recip / norm
+ interpolated_points = torch.sum(index_points(points2, idx) * weight.view(B, N, 3, 1), dim=2)
+
+ if points1 is not None:
+ points1 = points1.permute(0, 2, 1)
+ new_points = torch.cat([points1, interpolated_points], dim=-1)
+ else:
+ new_points = interpolated_points
+
+ new_points = new_points.permute(0, 2, 1)
+ for i, conv in enumerate(self.mlp_convs):
+ bn = self.mlp_bns[i]
+ new_points = F.relu(bn(conv(new_points)), inplace=True)
+ return new_points
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet_sem_seg/checkpoints/best_model.pth b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet_sem_seg/checkpoints/best_model.pth
new file mode 100644
index 0000000000..cedf378340
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet_sem_seg/checkpoints/best_model.pth differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet_sem_seg/eval.txt b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet_sem_seg/eval.txt
new file mode 100644
index 0000000000..c940abd7d3
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet_sem_seg/eval.txt
@@ -0,0 +1,90 @@
+2021-03-22 14:00:42,819 - Model - INFO - PARAMETER ...
+2021-03-22 14:00:42,819 - Model - INFO - Namespace(batch_size=32, gpu='3', log_dir='pointnet_sem_seg', num_point=4096, num_votes=3, test_area=5, visual=False)
+2021-03-22 14:03:04,291 - Model - INFO - The number of test data is: 68
+2021-03-22 14:03:18,579 - Model - INFO - ---- EVALUATION WHOLE SCENE----
+2021-03-22 14:19:00,890 - Model - INFO - Mean IoU of Area_5_office_1: 0.5226
+2021-03-22 14:30:38,175 - Model - INFO - Mean IoU of Area_5_office_10: 0.6372
+2021-03-22 14:37:49,773 - Model - INFO - Mean IoU of Area_5_conferenceRoom_3: 0.5545
+2021-03-22 14:40:10,392 - Model - INFO - Mean IoU of Area_5_office_15: 0.5439
+2021-03-22 14:42:40,022 - Model - INFO - Mean IoU of Area_5_lobby_1: 0.7051
+2021-03-22 14:43:47,697 - Model - INFO - Mean IoU of Area_5_office_27: 0.5269
+2021-03-22 14:47:03,584 - Model - INFO - Mean IoU of Area_5_hallway_13: 0.6413
+2021-03-22 14:50:39,706 - Model - INFO - Mean IoU of Area_5_office_41: 0.5485
+2021-03-22 14:52:54,654 - Model - INFO - Mean IoU of Area_5_office_18: 0.5707
+2021-03-22 14:54:16,573 - Model - INFO - Mean IoU of Area_5_office_12: 0.6308
+2021-03-22 14:55:26,253 - Model - INFO - Mean IoU of Area_5_office_28: 0.6282
+2021-03-22 14:56:42,799 - Model - INFO - Mean IoU of Area_5_office_31: 0.5279
+2021-03-22 14:57:43,856 - Model - INFO - Mean IoU of Area_5_office_26: 0.5036
+2021-03-22 14:59:09,162 - Model - INFO - Mean IoU of Area_5_office_35: 0.5754
+2021-03-22 21:55:16,080 - Model - INFO - Mean IoU of Area_5_hallway_2: 0.7006
+2021-03-22 21:57:13,716 - Model - INFO - Mean IoU of Area_5_office_39: 0.4802
+2021-03-22 22:00:43,118 - Model - INFO - Mean IoU of Area_5_office_24: 0.5181
+2021-03-22 22:02:49,449 - Model - INFO - Mean IoU of Area_5_office_14: 0.5707
+2021-03-22 22:03:39,562 - Model - INFO - Mean IoU of Area_5_storage_4: 0.5377
+2021-03-22 22:05:37,193 - Model - INFO - Mean IoU of Area_5_hallway_6: 0.5737
+2021-03-22 22:09:55,731 - Model - INFO - Mean IoU of Area_5_office_19: 0.5672
+2021-03-22 22:11:18,077 - Model - INFO - Mean IoU of Area_5_office_42: 0.6817
+2021-03-22 22:59:23,649 - Model - INFO - Mean IoU of Area_5_office_40: 0.3221
+2021-03-22 23:00:43,020 - Model - INFO - Mean IoU of Area_5_office_4: 0.6104
+2021-03-22 23:01:59,520 - Model - INFO - Mean IoU of Area_5_office_32: 0.5431
+2021-03-22 23:37:28,111 - Model - INFO - Mean IoU of Area_5_office_37: 0.3527
+2021-03-22 23:39:04,899 - Model - INFO - Mean IoU of Area_5_office_36: 0.5627
+2021-03-22 23:40:02,636 - Model - INFO - Mean IoU of Area_5_office_6: 0.7083
+2021-03-22 23:40:46,554 - Model - INFO - Mean IoU of Area_5_hallway_14: 0.6982
+2021-03-22 23:47:11,610 - Model - INFO - Mean IoU of Area_5_office_29: 0.4723
+2021-03-22 23:48:13,403 - Model - INFO - Mean IoU of Area_5_office_3: 0.6182
+2021-03-22 23:49:14,071 - Model - INFO - Mean IoU of Area_5_hallway_4: 0.4389
+2021-03-22 23:50:04,259 - Model - INFO - Mean IoU of Area_5_storage_1: 0.2845
+2021-03-22 23:51:05,933 - Model - INFO - Mean IoU of Area_5_office_11: 0.6785
+2021-03-22 23:52:13,898 - Model - INFO - Mean IoU of Area_5_office_8: 0.6464
+2021-03-22 23:53:16,759 - Model - INFO - Mean IoU of Area_5_office_34: 0.5066
+2021-03-23 00:08:41,949 - Model - INFO - Mean IoU of Area_5_office_21: 0.4500
+2021-03-23 00:09:52,155 - Model - INFO - Mean IoU of Area_5_storage_2: 0.3086
+2021-03-23 00:10:45,935 - Model - INFO - Mean IoU of Area_5_office_9: 0.5462
+2021-03-23 00:11:16,601 - Model - INFO - Mean IoU of Area_5_office_30: 0.6644
+2021-03-23 00:12:19,829 - Model - INFO - Mean IoU of Area_5_hallway_11: 0.7421
+2021-03-23 00:32:10,619 - Model - INFO - Mean IoU of Area_5_conferenceRoom_2: 0.3155
+2021-03-23 00:33:02,607 - Model - INFO - Mean IoU of Area_5_office_5: 0.7290
+2021-03-23 00:34:08,688 - Model - INFO - Mean IoU of Area_5_hallway_12: 0.6288
+2021-03-23 00:35:13,321 - Model - INFO - Mean IoU of Area_5_office_7: 0.5866
+2021-03-23 00:35:47,226 - Model - INFO - Mean IoU of Area_5_hallway_8: 0.6048
+2021-03-23 00:36:49,140 - Model - INFO - Mean IoU of Area_5_office_33: 0.5921
+2021-03-23 00:38:44,124 - Model - INFO - Mean IoU of Area_5_hallway_9: 0.4113
+2021-03-23 01:21:34,247 - Model - INFO - Mean IoU of Area_5_office_38: 0.3317
+2021-03-23 01:24:50,771 - Model - INFO - Mean IoU of Area_5_hallway_10: 0.7789
+2021-03-23 01:26:15,162 - Model - INFO - Mean IoU of Area_5_office_13: 0.5317
+2021-03-23 01:27:12,264 - Model - INFO - Mean IoU of Area_5_WC_2: 0.6539
+2021-03-23 01:28:20,157 - Model - INFO - Mean IoU of Area_5_office_23: 0.6125
+2021-03-23 01:29:10,703 - Model - INFO - Mean IoU of Area_5_pantry_1: 0.7774
+2021-03-23 01:30:09,708 - Model - INFO - Mean IoU of Area_5_office_22: 0.5609
+2021-03-23 01:31:28,083 - Model - INFO - Mean IoU of Area_5_office_17: 0.5694
+2021-03-23 01:32:28,308 - Model - INFO - Mean IoU of Area_5_WC_1: 0.6234
+2021-03-23 01:33:31,703 - Model - INFO - Mean IoU of Area_5_office_2: 0.6312
+2021-03-23 03:14:17,339 - Model - INFO - Mean IoU of Area_5_hallway_5: 0.4598
+2021-03-23 03:15:53,343 - Model - INFO - Mean IoU of Area_5_conferenceRoom_1: 0.5010
+2021-03-23 03:18:12,557 - Model - INFO - Mean IoU of Area_5_hallway_15: 0.4911
+2021-03-23 03:19:14,402 - Model - INFO - Mean IoU of Area_5_office_16: 0.5011
+2021-03-23 03:21:11,178 - Model - INFO - Mean IoU of Area_5_hallway_7: 0.6784
+2021-03-23 03:22:53,958 - Model - INFO - Mean IoU of Area_5_hallway_3: 0.6897
+2021-03-23 03:23:03,386 - Model - INFO - Mean IoU of Area_5_storage_3: 0.4259
+2021-03-23 03:23:49,510 - Model - INFO - Mean IoU of Area_5_office_25: 0.6604
+2021-03-23 03:24:24,043 - Model - INFO - Mean IoU of Area_5_office_20: 0.5186
+2021-03-23 04:31:18,322 - Model - INFO - Mean IoU of Area_5_hallway_1: 0.2402
+2021-03-23 04:31:24,295 - Model - INFO - ------- IoU --------
+class ceiling , IoU: 0.874
+class floor , IoU: 0.978
+class wall , IoU: 0.712
+class beam , IoU: 0.000
+class column , IoU: 0.092
+class window , IoU: 0.521
+class door , IoU: 0.163
+class table , IoU: 0.582
+class chair , IoU: 0.486
+class sofa , IoU: 0.032
+class bookcase , IoU: 0.483
+class board , IoU: 0.390
+class clutter , IoU: 0.362
+
+2021-03-23 04:31:24,296 - Model - INFO - eval point avg class IoU: 0.436565
+2021-03-23 04:31:24,296 - Model - INFO - eval whole scene point avg class acc: 0.526288
+2021-03-23 04:31:24,296 - Model - INFO - eval whole scene point accuracy: 0.789107
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet_sem_seg/logs/pointnet_sem_seg.txt b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet_sem_seg/logs/pointnet_sem_seg.txt
new file mode 100644
index 0000000000..0b86bb331c
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet_sem_seg/logs/pointnet_sem_seg.txt
@@ -0,0 +1,3572 @@
+2021-03-21 12:08:36,607 - Model - INFO - PARAMETER ...
+2021-03-21 12:08:36,607 - Model - INFO - Namespace(batch_size=16, decay_rate=0.0001, epoch=128, gpu='0', learning_rate=0.001, log_dir='pointnet_sem_seg', lr_decay=0.7, model='pointnet_sem_seg', npoint=4096, optimizer='Adam', step_size=10, test_area=5)
+2021-03-21 15:55:23,787 - Model - INFO - The number of training data is: 47623
+2021-03-21 15:55:23,788 - Model - INFO - The number of test data is: 19191
+2021-03-21 15:55:23,954 - Model - INFO - No existing model, starting training from scratch...
+2021-03-21 15:55:23,956 - Model - INFO - **** Epoch 1 (1/128) ****
+2021-03-21 15:55:23,956 - Model - INFO - Learning rate:0.001000
+2021-03-21 16:02:32,048 - Model - INFO - Training mean loss: 1.245388
+2021-03-21 16:02:32,048 - Model - INFO - Training accuracy: 0.636718
+2021-03-21 16:02:32,048 - Model - INFO - Save model...
+2021-03-21 16:02:32,049 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-21 16:02:32,157 - Model - INFO - Saving model....
+2021-03-21 16:02:32,549 - Model - INFO - ---- EPOCH 001 EVALUATION ----
+2021-03-21 16:04:12,660 - Model - INFO - eval mean loss: 1.370660
+2021-03-21 16:04:12,660 - Model - INFO - eval point avg class IoU: 0.253647
+2021-03-21 16:04:12,660 - Model - INFO - eval point accuracy: 0.640119
+2021-03-21 16:04:12,661 - Model - INFO - eval point avg class acc: 0.358281
+2021-03-21 16:04:12,661 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.089, IoU: 0.764
+class floor weight: 0.201, IoU: 0.766
+class wall weight: 0.166, IoU: 0.476
+class beam weight: 0.279, IoU: 0.000
+class column weight: 0.000, IoU: 0.000
+class window weight: 0.019, IoU: 0.285
+class door weight: 0.033, IoU: 0.103
+class table weight: 0.032, IoU: 0.162
+class chair weight: 0.040, IoU: 0.293
+class sofa weight: 0.019, IoU: 0.000
+class bookcase weight: 0.003, IoU: 0.355
+class board weight: 0.108, IoU: 0.014
+class clutter weight: 0.011, IoU: 0.080
+
+2021-03-21 16:04:12,662 - Model - INFO - Eval mean loss: 1.370660
+2021-03-21 16:04:12,662 - Model - INFO - Eval accuracy: 0.640119
+2021-03-21 16:04:12,662 - Model - INFO - Save model...
+2021-03-21 16:04:12,662 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/best_model.pth
+2021-03-21 16:04:12,770 - Model - INFO - Saving model....
+2021-03-21 16:04:12,770 - Model - INFO - Best mIoU: 0.253647
+2021-03-21 16:04:12,770 - Model - INFO - **** Epoch 2 (2/128) ****
+2021-03-21 16:04:12,770 - Model - INFO - Learning rate:0.001000
+2021-03-21 16:11:14,255 - Model - INFO - Training mean loss: 0.968061
+2021-03-21 16:11:14,256 - Model - INFO - Training accuracy: 0.708426
+2021-03-21 16:11:14,724 - Model - INFO - ---- EPOCH 002 EVALUATION ----
+2021-03-21 16:13:11,300 - Model - INFO - eval mean loss: 1.095824
+2021-03-21 16:13:11,300 - Model - INFO - eval point avg class IoU: 0.320598
+2021-03-21 16:13:11,300 - Model - INFO - eval point accuracy: 0.711514
+2021-03-21 16:13:11,301 - Model - INFO - eval point avg class acc: 0.418762
+2021-03-21 16:13:11,301 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.837
+class floor weight: 0.202, IoU: 0.876
+class wall weight: 0.170, IoU: 0.590
+class beam weight: 0.275, IoU: 0.000
+class column weight: 0.000, IoU: 0.003
+class window weight: 0.018, IoU: 0.182
+class door weight: 0.032, IoU: 0.150
+class table weight: 0.030, IoU: 0.383
+class chair weight: 0.038, IoU: 0.345
+class sofa weight: 0.018, IoU: 0.024
+class bookcase weight: 0.003, IoU: 0.405
+class board weight: 0.110, IoU: 0.195
+class clutter weight: 0.011, IoU: 0.177
+
+2021-03-21 16:13:11,301 - Model - INFO - Eval mean loss: 1.095824
+2021-03-21 16:13:11,301 - Model - INFO - Eval accuracy: 0.711514
+2021-03-21 16:13:11,301 - Model - INFO - Save model...
+2021-03-21 16:13:11,301 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/best_model.pth
+2021-03-21 16:13:11,485 - Model - INFO - Saving model....
+2021-03-21 16:13:11,485 - Model - INFO - Best mIoU: 0.320598
+2021-03-21 16:13:11,485 - Model - INFO - **** Epoch 3 (3/128) ****
+2021-03-21 16:13:11,485 - Model - INFO - Learning rate:0.001000
+2021-03-21 16:20:12,772 - Model - INFO - Training mean loss: 0.805200
+2021-03-21 16:20:12,772 - Model - INFO - Training accuracy: 0.749773
+2021-03-21 16:20:13,218 - Model - INFO - ---- EPOCH 003 EVALUATION ----
+2021-03-21 16:22:09,498 - Model - INFO - eval mean loss: 1.061679
+2021-03-21 16:22:09,498 - Model - INFO - eval point avg class IoU: 0.348705
+2021-03-21 16:22:09,498 - Model - INFO - eval point accuracy: 0.717259
+2021-03-21 16:22:09,499 - Model - INFO - eval point avg class acc: 0.478279
+2021-03-21 16:22:09,499 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.830
+class floor weight: 0.200, IoU: 0.923
+class wall weight: 0.167, IoU: 0.613
+class beam weight: 0.280, IoU: 0.001
+class column weight: 0.000, IoU: 0.012
+class window weight: 0.019, IoU: 0.396
+class door weight: 0.033, IoU: 0.200
+class table weight: 0.031, IoU: 0.407
+class chair weight: 0.038, IoU: 0.366
+class sofa weight: 0.020, IoU: 0.035
+class bookcase weight: 0.003, IoU: 0.366
+class board weight: 0.107, IoU: 0.164
+class clutter weight: 0.012, IoU: 0.219
+
+2021-03-21 16:22:09,502 - Model - INFO - Eval mean loss: 1.061679
+2021-03-21 16:22:09,502 - Model - INFO - Eval accuracy: 0.717259
+2021-03-21 16:22:09,502 - Model - INFO - Save model...
+2021-03-21 16:22:09,503 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/best_model.pth
+2021-03-21 16:22:09,997 - Model - INFO - Saving model....
+2021-03-21 16:22:09,998 - Model - INFO - Best mIoU: 0.348705
+2021-03-21 16:22:09,998 - Model - INFO - **** Epoch 4 (4/128) ****
+2021-03-21 16:22:09,998 - Model - INFO - Learning rate:0.001000
+2021-03-21 16:29:11,405 - Model - INFO - Training mean loss: 0.672402
+2021-03-21 16:29:11,405 - Model - INFO - Training accuracy: 0.790482
+2021-03-21 16:29:11,917 - Model - INFO - ---- EPOCH 004 EVALUATION ----
+2021-03-21 16:31:08,619 - Model - INFO - eval mean loss: 0.925488
+2021-03-21 16:31:08,620 - Model - INFO - eval point avg class IoU: 0.358306
+2021-03-21 16:31:08,620 - Model - INFO - eval point accuracy: 0.724071
+2021-03-21 16:31:08,620 - Model - INFO - eval point avg class acc: 0.506247
+2021-03-21 16:31:08,620 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.809
+class floor weight: 0.200, IoU: 0.936
+class wall weight: 0.168, IoU: 0.646
+class beam weight: 0.281, IoU: 0.002
+class column weight: 0.000, IoU: 0.005
+class window weight: 0.018, IoU: 0.443
+class door weight: 0.032, IoU: 0.146
+class table weight: 0.030, IoU: 0.442
+class chair weight: 0.038, IoU: 0.271
+class sofa weight: 0.019, IoU: 0.057
+class bookcase weight: 0.003, IoU: 0.488
+class board weight: 0.109, IoU: 0.178
+class clutter weight: 0.012, IoU: 0.235
+
+2021-03-21 16:31:08,620 - Model - INFO - Eval mean loss: 0.925488
+2021-03-21 16:31:08,621 - Model - INFO - Eval accuracy: 0.724071
+2021-03-21 16:31:08,621 - Model - INFO - Save model...
+2021-03-21 16:31:08,621 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/best_model.pth
+2021-03-21 16:31:09,071 - Model - INFO - Saving model....
+2021-03-21 16:31:09,071 - Model - INFO - Best mIoU: 0.358306
+2021-03-21 16:31:09,071 - Model - INFO - **** Epoch 5 (5/128) ****
+2021-03-21 16:31:09,071 - Model - INFO - Learning rate:0.001000
+2021-03-21 16:38:11,460 - Model - INFO - Training mean loss: 0.572615
+2021-03-21 16:38:11,460 - Model - INFO - Training accuracy: 0.818800
+2021-03-21 16:38:11,994 - Model - INFO - ---- EPOCH 005 EVALUATION ----
+2021-03-21 16:39:56,247 - Model - INFO - eval mean loss: 0.969927
+2021-03-21 16:39:56,248 - Model - INFO - eval point avg class IoU: 0.377436
+2021-03-21 16:39:56,248 - Model - INFO - eval point accuracy: 0.729780
+2021-03-21 16:39:56,248 - Model - INFO - eval point avg class acc: 0.516964
+2021-03-21 16:39:56,248 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.824
+class floor weight: 0.200, IoU: 0.952
+class wall weight: 0.167, IoU: 0.630
+class beam weight: 0.280, IoU: 0.002
+class column weight: 0.000, IoU: 0.018
+class window weight: 0.019, IoU: 0.450
+class door weight: 0.033, IoU: 0.195
+class table weight: 0.030, IoU: 0.509
+class chair weight: 0.038, IoU: 0.376
+class sofa weight: 0.019, IoU: 0.020
+class bookcase weight: 0.003, IoU: 0.435
+class board weight: 0.110, IoU: 0.250
+class clutter weight: 0.012, IoU: 0.247
+
+2021-03-21 16:39:56,248 - Model - INFO - Eval mean loss: 0.969927
+2021-03-21 16:39:56,249 - Model - INFO - Eval accuracy: 0.729780
+2021-03-21 16:39:56,249 - Model - INFO - Save model...
+2021-03-21 16:39:56,249 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/best_model.pth
+2021-03-21 16:39:56,400 - Model - INFO - Saving model....
+2021-03-21 16:39:56,401 - Model - INFO - Best mIoU: 0.377436
+2021-03-21 16:39:56,401 - Model - INFO - **** Epoch 6 (6/128) ****
+2021-03-21 16:39:56,401 - Model - INFO - Learning rate:0.001000
+2021-03-21 16:46:59,000 - Model - INFO - Training mean loss: 0.512581
+2021-03-21 16:46:59,001 - Model - INFO - Training accuracy: 0.836098
+2021-03-21 16:46:59,001 - Model - INFO - Save model...
+2021-03-21 16:46:59,001 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-21 16:46:59,172 - Model - INFO - Saving model....
+2021-03-21 16:46:59,670 - Model - INFO - ---- EPOCH 006 EVALUATION ----
+2021-03-21 16:48:44,202 - Model - INFO - eval mean loss: 1.169176
+2021-03-21 16:48:44,202 - Model - INFO - eval point avg class IoU: 0.372071
+2021-03-21 16:48:44,202 - Model - INFO - eval point accuracy: 0.742360
+2021-03-21 16:48:44,202 - Model - INFO - eval point avg class acc: 0.478049
+2021-03-21 16:48:44,202 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.878
+class floor weight: 0.200, IoU: 0.922
+class wall weight: 0.168, IoU: 0.673
+class beam weight: 0.279, IoU: 0.000
+class column weight: 0.000, IoU: 0.010
+class window weight: 0.018, IoU: 0.496
+class door weight: 0.032, IoU: 0.160
+class table weight: 0.031, IoU: 0.439
+class chair weight: 0.040, IoU: 0.407
+class sofa weight: 0.019, IoU: 0.024
+class bookcase weight: 0.003, IoU: 0.284
+class board weight: 0.109, IoU: 0.282
+class clutter weight: 0.011, IoU: 0.261
+
+2021-03-21 16:48:44,203 - Model - INFO - Eval mean loss: 1.169176
+2021-03-21 16:48:44,203 - Model - INFO - Eval accuracy: 0.742360
+2021-03-21 16:48:44,203 - Model - INFO - Best mIoU: 0.377436
+2021-03-21 16:48:44,203 - Model - INFO - **** Epoch 7 (7/128) ****
+2021-03-21 16:48:44,203 - Model - INFO - Learning rate:0.001000
+2021-03-21 16:55:45,211 - Model - INFO - Training mean loss: 0.465419
+2021-03-21 16:55:45,211 - Model - INFO - Training accuracy: 0.850192
+2021-03-21 16:55:45,675 - Model - INFO - ---- EPOCH 007 EVALUATION ----
+2021-03-21 16:57:29,548 - Model - INFO - eval mean loss: 1.026273
+2021-03-21 16:57:29,548 - Model - INFO - eval point avg class IoU: 0.376046
+2021-03-21 16:57:29,548 - Model - INFO - eval point accuracy: 0.729849
+2021-03-21 16:57:29,548 - Model - INFO - eval point avg class acc: 0.505757
+2021-03-21 16:57:29,549 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.836
+class floor weight: 0.201, IoU: 0.872
+class wall weight: 0.168, IoU: 0.668
+class beam weight: 0.279, IoU: 0.002
+class column weight: 0.000, IoU: 0.060
+class window weight: 0.018, IoU: 0.474
+class door weight: 0.033, IoU: 0.114
+class table weight: 0.031, IoU: 0.491
+class chair weight: 0.039, IoU: 0.368
+class sofa weight: 0.019, IoU: 0.053
+class bookcase weight: 0.002, IoU: 0.391
+class board weight: 0.106, IoU: 0.272
+class clutter weight: 0.011, IoU: 0.288
+
+2021-03-21 16:57:29,549 - Model - INFO - Eval mean loss: 1.026273
+2021-03-21 16:57:29,550 - Model - INFO - Eval accuracy: 0.729849
+2021-03-21 16:57:29,550 - Model - INFO - Best mIoU: 0.377436
+2021-03-21 16:57:29,550 - Model - INFO - **** Epoch 8 (8/128) ****
+2021-03-21 16:57:29,550 - Model - INFO - Learning rate:0.001000
+2021-03-21 17:04:31,831 - Model - INFO - Training mean loss: 0.433396
+2021-03-21 17:04:31,831 - Model - INFO - Training accuracy: 0.859820
+2021-03-21 17:04:32,331 - Model - INFO - ---- EPOCH 008 EVALUATION ----
+2021-03-21 17:06:37,703 - Model - INFO - eval mean loss: 1.091104
+2021-03-21 17:06:37,704 - Model - INFO - eval point avg class IoU: 0.390108
+2021-03-21 17:06:37,704 - Model - INFO - eval point accuracy: 0.756088
+2021-03-21 17:06:37,704 - Model - INFO - eval point avg class acc: 0.520473
+2021-03-21 17:06:37,705 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.874
+class floor weight: 0.200, IoU: 0.964
+class wall weight: 0.167, IoU: 0.689
+class beam weight: 0.276, IoU: 0.002
+class column weight: 0.000, IoU: 0.025
+class window weight: 0.017, IoU: 0.431
+class door weight: 0.032, IoU: 0.204
+class table weight: 0.030, IoU: 0.544
+class chair weight: 0.039, IoU: 0.403
+class sofa weight: 0.019, IoU: 0.045
+class bookcase weight: 0.003, IoU: 0.401
+class board weight: 0.111, IoU: 0.226
+class clutter weight: 0.012, IoU: 0.264
+
+2021-03-21 17:06:37,705 - Model - INFO - Eval mean loss: 1.091104
+2021-03-21 17:06:37,705 - Model - INFO - Eval accuracy: 0.756088
+2021-03-21 17:06:37,705 - Model - INFO - Save model...
+2021-03-21 17:06:37,705 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/best_model.pth
+2021-03-21 17:06:38,283 - Model - INFO - Saving model....
+2021-03-21 17:06:38,284 - Model - INFO - Best mIoU: 0.390108
+2021-03-21 17:06:38,284 - Model - INFO - **** Epoch 9 (9/128) ****
+2021-03-21 17:06:38,284 - Model - INFO - Learning rate:0.001000
+2021-03-21 17:13:38,810 - Model - INFO - Training mean loss: 0.409605
+2021-03-21 17:13:38,811 - Model - INFO - Training accuracy: 0.866212
+2021-03-21 17:13:39,274 - Model - INFO - ---- EPOCH 009 EVALUATION ----
+2021-03-21 17:15:35,690 - Model - INFO - eval mean loss: 0.944729
+2021-03-21 17:15:35,690 - Model - INFO - eval point avg class IoU: 0.416496
+2021-03-21 17:15:35,690 - Model - INFO - eval point accuracy: 0.774653
+2021-03-21 17:15:35,690 - Model - INFO - eval point avg class acc: 0.543137
+2021-03-21 17:15:35,691 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.883
+class floor weight: 0.200, IoU: 0.970
+class wall weight: 0.168, IoU: 0.702
+class beam weight: 0.279, IoU: 0.001
+class column weight: 0.000, IoU: 0.017
+class window weight: 0.018, IoU: 0.521
+class door weight: 0.034, IoU: 0.149
+class table weight: 0.029, IoU: 0.561
+class chair weight: 0.039, IoU: 0.421
+class sofa weight: 0.019, IoU: 0.121
+class bookcase weight: 0.002, IoU: 0.477
+class board weight: 0.109, IoU: 0.282
+class clutter weight: 0.011, IoU: 0.310
+
+2021-03-21 17:15:35,691 - Model - INFO - Eval mean loss: 0.944729
+2021-03-21 17:15:35,691 - Model - INFO - Eval accuracy: 0.774653
+2021-03-21 17:15:35,691 - Model - INFO - Save model...
+2021-03-21 17:15:35,691 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/best_model.pth
+2021-03-21 17:15:35,886 - Model - INFO - Saving model....
+2021-03-21 17:15:35,887 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 17:15:35,887 - Model - INFO - **** Epoch 10 (10/128) ****
+2021-03-21 17:15:35,887 - Model - INFO - Learning rate:0.001000
+2021-03-21 17:22:34,302 - Model - INFO - Training mean loss: 0.390220
+2021-03-21 17:22:34,302 - Model - INFO - Training accuracy: 0.872545
+2021-03-21 17:22:34,712 - Model - INFO - ---- EPOCH 010 EVALUATION ----
+2021-03-21 17:24:19,933 - Model - INFO - eval mean loss: 1.214444
+2021-03-21 17:24:19,933 - Model - INFO - eval point avg class IoU: 0.369084
+2021-03-21 17:24:19,933 - Model - INFO - eval point accuracy: 0.716060
+2021-03-21 17:24:19,934 - Model - INFO - eval point avg class acc: 0.506722
+2021-03-21 17:24:19,934 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.827
+class floor weight: 0.200, IoU: 0.798
+class wall weight: 0.168, IoU: 0.668
+class beam weight: 0.278, IoU: 0.004
+class column weight: 0.000, IoU: 0.030
+class window weight: 0.017, IoU: 0.531
+class door weight: 0.032, IoU: 0.076
+class table weight: 0.031, IoU: 0.457
+class chair weight: 0.039, IoU: 0.366
+class sofa weight: 0.019, IoU: 0.083
+class bookcase weight: 0.003, IoU: 0.391
+class board weight: 0.109, IoU: 0.269
+class clutter weight: 0.012, IoU: 0.298
+
+2021-03-21 17:24:19,935 - Model - INFO - Eval mean loss: 1.214444
+2021-03-21 17:24:19,935 - Model - INFO - Eval accuracy: 0.716060
+2021-03-21 17:24:19,935 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 17:24:19,935 - Model - INFO - **** Epoch 11 (11/128) ****
+2021-03-21 17:24:19,935 - Model - INFO - Learning rate:0.000700
+2021-03-21 17:31:24,152 - Model - INFO - Training mean loss: 0.344885
+2021-03-21 17:31:24,153 - Model - INFO - Training accuracy: 0.886517
+2021-03-21 17:31:24,153 - Model - INFO - Save model...
+2021-03-21 17:31:24,153 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-21 17:31:24,289 - Model - INFO - Saving model....
+2021-03-21 17:31:24,729 - Model - INFO - ---- EPOCH 011 EVALUATION ----
+2021-03-21 17:33:24,582 - Model - INFO - eval mean loss: 1.186402
+2021-03-21 17:33:24,583 - Model - INFO - eval point avg class IoU: 0.362230
+2021-03-21 17:33:24,583 - Model - INFO - eval point accuracy: 0.708684
+2021-03-21 17:33:24,583 - Model - INFO - eval point avg class acc: 0.510041
+2021-03-21 17:33:24,583 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.786
+class floor weight: 0.198, IoU: 0.791
+class wall weight: 0.165, IoU: 0.680
+class beam weight: 0.282, IoU: 0.002
+class column weight: 0.000, IoU: 0.049
+class window weight: 0.018, IoU: 0.495
+class door weight: 0.034, IoU: 0.121
+class table weight: 0.031, IoU: 0.438
+class chair weight: 0.039, IoU: 0.276
+class sofa weight: 0.019, IoU: 0.081
+class bookcase weight: 0.003, IoU: 0.413
+class board weight: 0.109, IoU: 0.307
+class clutter weight: 0.012, IoU: 0.270
+
+2021-03-21 17:33:24,583 - Model - INFO - Eval mean loss: 1.186402
+2021-03-21 17:33:24,583 - Model - INFO - Eval accuracy: 0.708684
+2021-03-21 17:33:24,583 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 17:33:24,584 - Model - INFO - **** Epoch 12 (12/128) ****
+2021-03-21 17:33:24,584 - Model - INFO - Learning rate:0.000700
+2021-03-21 17:40:27,308 - Model - INFO - Training mean loss: 0.332054
+2021-03-21 17:40:27,309 - Model - INFO - Training accuracy: 0.890613
+2021-03-21 17:40:27,785 - Model - INFO - ---- EPOCH 012 EVALUATION ----
+2021-03-21 17:42:05,409 - Model - INFO - eval mean loss: 1.021606
+2021-03-21 17:42:05,410 - Model - INFO - eval point avg class IoU: 0.410493
+2021-03-21 17:42:05,410 - Model - INFO - eval point accuracy: 0.766381
+2021-03-21 17:42:05,410 - Model - INFO - eval point avg class acc: 0.518474
+2021-03-21 17:42:05,410 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.862
+class floor weight: 0.199, IoU: 0.950
+class wall weight: 0.167, IoU: 0.709
+class beam weight: 0.283, IoU: 0.001
+class column weight: 0.000, IoU: 0.038
+class window weight: 0.018, IoU: 0.484
+class door weight: 0.032, IoU: 0.165
+class table weight: 0.030, IoU: 0.524
+class chair weight: 0.039, IoU: 0.379
+class sofa weight: 0.018, IoU: 0.107
+class bookcase weight: 0.003, IoU: 0.464
+class board weight: 0.108, IoU: 0.350
+class clutter weight: 0.012, IoU: 0.304
+
+2021-03-21 17:42:05,411 - Model - INFO - Eval mean loss: 1.021606
+2021-03-21 17:42:05,411 - Model - INFO - Eval accuracy: 0.766381
+2021-03-21 17:42:05,411 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 17:42:05,411 - Model - INFO - **** Epoch 13 (13/128) ****
+2021-03-21 17:42:05,411 - Model - INFO - Learning rate:0.000700
+2021-03-21 17:49:08,083 - Model - INFO - Training mean loss: 0.321023
+2021-03-21 17:49:08,083 - Model - INFO - Training accuracy: 0.894069
+2021-03-21 17:49:08,568 - Model - INFO - ---- EPOCH 013 EVALUATION ----
+2021-03-21 17:50:43,279 - Model - INFO - eval mean loss: 1.182312
+2021-03-21 17:50:43,280 - Model - INFO - eval point avg class IoU: 0.397243
+2021-03-21 17:50:43,280 - Model - INFO - eval point accuracy: 0.761849
+2021-03-21 17:50:43,280 - Model - INFO - eval point avg class acc: 0.498230
+2021-03-21 17:50:43,280 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.877
+class floor weight: 0.201, IoU: 0.973
+class wall weight: 0.167, IoU: 0.695
+class beam weight: 0.281, IoU: 0.001
+class column weight: 0.000, IoU: 0.008
+class window weight: 0.018, IoU: 0.542
+class door weight: 0.032, IoU: 0.167
+class table weight: 0.030, IoU: 0.462
+class chair weight: 0.039, IoU: 0.412
+class sofa weight: 0.019, IoU: 0.043
+class bookcase weight: 0.003, IoU: 0.370
+class board weight: 0.108, IoU: 0.319
+class clutter weight: 0.011, IoU: 0.294
+
+2021-03-21 17:50:43,281 - Model - INFO - Eval mean loss: 1.182312
+2021-03-21 17:50:43,281 - Model - INFO - Eval accuracy: 0.761849
+2021-03-21 17:50:43,281 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 17:50:43,281 - Model - INFO - **** Epoch 14 (14/128) ****
+2021-03-21 17:50:43,282 - Model - INFO - Learning rate:0.000700
+2021-03-21 17:57:58,017 - Model - INFO - Training mean loss: 0.312073
+2021-03-21 17:57:58,018 - Model - INFO - Training accuracy: 0.897151
+2021-03-21 17:57:58,653 - Model - INFO - ---- EPOCH 014 EVALUATION ----
+2021-03-21 17:59:40,765 - Model - INFO - eval mean loss: 1.093210
+2021-03-21 17:59:40,765 - Model - INFO - eval point avg class IoU: 0.399390
+2021-03-21 17:59:40,766 - Model - INFO - eval point accuracy: 0.756678
+2021-03-21 17:59:40,766 - Model - INFO - eval point avg class acc: 0.519066
+2021-03-21 17:59:40,766 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.803
+class floor weight: 0.199, IoU: 0.975
+class wall weight: 0.166, IoU: 0.699
+class beam weight: 0.284, IoU: 0.003
+class column weight: 0.000, IoU: 0.061
+class window weight: 0.017, IoU: 0.476
+class door weight: 0.033, IoU: 0.069
+class table weight: 0.030, IoU: 0.538
+class chair weight: 0.038, IoU: 0.404
+class sofa weight: 0.018, IoU: 0.078
+class bookcase weight: 0.003, IoU: 0.464
+class board weight: 0.108, IoU: 0.313
+class clutter weight: 0.012, IoU: 0.309
+
+2021-03-21 17:59:40,766 - Model - INFO - Eval mean loss: 1.093210
+2021-03-21 17:59:40,766 - Model - INFO - Eval accuracy: 0.756678
+2021-03-21 17:59:40,766 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 17:59:40,767 - Model - INFO - **** Epoch 15 (15/128) ****
+2021-03-21 17:59:40,767 - Model - INFO - Learning rate:0.000700
+2021-03-21 18:06:52,469 - Model - INFO - Training mean loss: 0.304226
+2021-03-21 18:06:52,469 - Model - INFO - Training accuracy: 0.899647
+2021-03-21 18:06:52,864 - Model - INFO - ---- EPOCH 015 EVALUATION ----
+2021-03-21 18:08:29,437 - Model - INFO - eval mean loss: 1.051392
+2021-03-21 18:08:29,438 - Model - INFO - eval point avg class IoU: 0.401273
+2021-03-21 18:08:29,438 - Model - INFO - eval point accuracy: 0.744380
+2021-03-21 18:08:29,438 - Model - INFO - eval point avg class acc: 0.523576
+2021-03-21 18:08:29,438 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.753
+class floor weight: 0.201, IoU: 0.977
+class wall weight: 0.168, IoU: 0.675
+class beam weight: 0.277, IoU: 0.001
+class column weight: 0.000, IoU: 0.099
+class window weight: 0.017, IoU: 0.503
+class door weight: 0.032, IoU: 0.096
+class table weight: 0.029, IoU: 0.474
+class chair weight: 0.038, IoU: 0.360
+class sofa weight: 0.019, IoU: 0.093
+class bookcase weight: 0.003, IoU: 0.498
+class board weight: 0.110, IoU: 0.390
+class clutter weight: 0.013, IoU: 0.297
+
+2021-03-21 18:08:29,439 - Model - INFO - Eval mean loss: 1.051392
+2021-03-21 18:08:29,439 - Model - INFO - Eval accuracy: 0.744380
+2021-03-21 18:08:29,439 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 18:08:29,439 - Model - INFO - **** Epoch 16 (16/128) ****
+2021-03-21 18:08:29,439 - Model - INFO - Learning rate:0.000700
+2021-03-21 18:15:33,615 - Model - INFO - Training mean loss: 0.300286
+2021-03-21 18:15:33,616 - Model - INFO - Training accuracy: 0.901225
+2021-03-21 18:15:33,616 - Model - INFO - Save model...
+2021-03-21 18:15:33,616 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-21 18:15:33,785 - Model - INFO - Saving model....
+2021-03-21 18:15:34,223 - Model - INFO - ---- EPOCH 016 EVALUATION ----
+2021-03-21 18:17:08,708 - Model - INFO - eval mean loss: 0.958777
+2021-03-21 18:17:08,708 - Model - INFO - eval point avg class IoU: 0.412182
+2021-03-21 18:17:08,708 - Model - INFO - eval point accuracy: 0.761440
+2021-03-21 18:17:08,708 - Model - INFO - eval point avg class acc: 0.537869
+2021-03-21 18:17:08,708 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.858
+class floor weight: 0.200, IoU: 0.975
+class wall weight: 0.167, IoU: 0.689
+class beam weight: 0.279, IoU: 0.001
+class column weight: 0.000, IoU: 0.068
+class window weight: 0.018, IoU: 0.527
+class door weight: 0.035, IoU: 0.120
+class table weight: 0.030, IoU: 0.513
+class chair weight: 0.038, IoU: 0.432
+class sofa weight: 0.019, IoU: 0.116
+class bookcase weight: 0.003, IoU: 0.455
+class board weight: 0.105, IoU: 0.291
+class clutter weight: 0.012, IoU: 0.314
+
+2021-03-21 18:17:08,709 - Model - INFO - Eval mean loss: 0.958777
+2021-03-21 18:17:08,709 - Model - INFO - Eval accuracy: 0.761440
+2021-03-21 18:17:08,709 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 18:17:08,709 - Model - INFO - **** Epoch 17 (17/128) ****
+2021-03-21 18:17:08,709 - Model - INFO - Learning rate:0.000700
+2021-03-21 18:24:12,621 - Model - INFO - Training mean loss: 0.293014
+2021-03-21 18:24:12,621 - Model - INFO - Training accuracy: 0.903508
+2021-03-21 18:24:13,014 - Model - INFO - ---- EPOCH 017 EVALUATION ----
+2021-03-21 18:25:53,439 - Model - INFO - eval mean loss: 1.156071
+2021-03-21 18:25:53,440 - Model - INFO - eval point avg class IoU: 0.391456
+2021-03-21 18:25:53,440 - Model - INFO - eval point accuracy: 0.747829
+2021-03-21 18:25:53,440 - Model - INFO - eval point avg class acc: 0.520749
+2021-03-21 18:25:53,440 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.803
+class floor weight: 0.200, IoU: 0.975
+class wall weight: 0.166, IoU: 0.677
+class beam weight: 0.280, IoU: 0.003
+class column weight: 0.000, IoU: 0.046
+class window weight: 0.018, IoU: 0.515
+class door weight: 0.034, IoU: 0.064
+class table weight: 0.030, IoU: 0.506
+class chair weight: 0.039, IoU: 0.352
+class sofa weight: 0.020, IoU: 0.092
+class bookcase weight: 0.003, IoU: 0.469
+class board weight: 0.108, IoU: 0.310
+class clutter weight: 0.012, IoU: 0.276
+
+2021-03-21 18:25:53,442 - Model - INFO - Eval mean loss: 1.156071
+2021-03-21 18:25:53,442 - Model - INFO - Eval accuracy: 0.747829
+2021-03-21 18:25:53,442 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 18:25:53,442 - Model - INFO - **** Epoch 18 (18/128) ****
+2021-03-21 18:25:53,442 - Model - INFO - Learning rate:0.000700
+2021-03-21 18:32:51,681 - Model - INFO - Training mean loss: 0.291138
+2021-03-21 18:32:51,682 - Model - INFO - Training accuracy: 0.903424
+2021-03-21 18:32:52,186 - Model - INFO - ---- EPOCH 018 EVALUATION ----
+2021-03-21 18:34:48,107 - Model - INFO - eval mean loss: 1.078577
+2021-03-21 18:34:48,107 - Model - INFO - eval point avg class IoU: 0.411946
+2021-03-21 18:34:48,107 - Model - INFO - eval point accuracy: 0.762047
+2021-03-21 18:34:48,107 - Model - INFO - eval point avg class acc: 0.532364
+2021-03-21 18:34:48,107 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.860
+class floor weight: 0.199, IoU: 0.968
+class wall weight: 0.164, IoU: 0.688
+class beam weight: 0.282, IoU: 0.002
+class column weight: 0.000, IoU: 0.036
+class window weight: 0.018, IoU: 0.543
+class door weight: 0.034, IoU: 0.164
+class table weight: 0.030, IoU: 0.523
+class chair weight: 0.039, IoU: 0.408
+class sofa weight: 0.018, IoU: 0.072
+class bookcase weight: 0.003, IoU: 0.473
+class board weight: 0.109, IoU: 0.305
+class clutter weight: 0.012, IoU: 0.314
+
+2021-03-21 18:34:48,108 - Model - INFO - Eval mean loss: 1.078577
+2021-03-21 18:34:48,108 - Model - INFO - Eval accuracy: 0.762047
+2021-03-21 18:34:48,108 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 18:34:48,108 - Model - INFO - **** Epoch 19 (19/128) ****
+2021-03-21 18:34:48,108 - Model - INFO - Learning rate:0.000700
+2021-03-21 18:41:44,699 - Model - INFO - Training mean loss: 0.285189
+2021-03-21 18:41:44,700 - Model - INFO - Training accuracy: 0.905663
+2021-03-21 18:41:45,174 - Model - INFO - ---- EPOCH 019 EVALUATION ----
+2021-03-21 18:43:29,883 - Model - INFO - eval mean loss: 1.217753
+2021-03-21 18:43:29,883 - Model - INFO - eval point avg class IoU: 0.397558
+2021-03-21 18:43:29,883 - Model - INFO - eval point accuracy: 0.749816
+2021-03-21 18:43:29,883 - Model - INFO - eval point avg class acc: 0.505754
+2021-03-21 18:43:29,884 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.744
+class floor weight: 0.200, IoU: 0.979
+class wall weight: 0.165, IoU: 0.683
+class beam weight: 0.282, IoU: 0.001
+class column weight: 0.000, IoU: 0.039
+class window weight: 0.019, IoU: 0.495
+class door weight: 0.032, IoU: 0.101
+class table weight: 0.032, IoU: 0.540
+class chair weight: 0.039, IoU: 0.424
+class sofa weight: 0.018, IoU: 0.033
+class bookcase weight: 0.003, IoU: 0.481
+class board weight: 0.109, IoU: 0.345
+class clutter weight: 0.011, IoU: 0.302
+
+2021-03-21 18:43:29,884 - Model - INFO - Eval mean loss: 1.217753
+2021-03-21 18:43:29,884 - Model - INFO - Eval accuracy: 0.749816
+2021-03-21 18:43:29,884 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 18:43:29,884 - Model - INFO - **** Epoch 20 (20/128) ****
+2021-03-21 18:43:29,884 - Model - INFO - Learning rate:0.000700
+2021-03-21 18:50:27,457 - Model - INFO - Training mean loss: 0.282355
+2021-03-21 18:50:27,458 - Model - INFO - Training accuracy: 0.906742
+2021-03-21 18:50:27,953 - Model - INFO - ---- EPOCH 020 EVALUATION ----
+2021-03-21 18:52:08,888 - Model - INFO - eval mean loss: 1.353670
+2021-03-21 18:52:08,889 - Model - INFO - eval point avg class IoU: 0.372912
+2021-03-21 18:52:08,889 - Model - INFO - eval point accuracy: 0.735467
+2021-03-21 18:52:08,889 - Model - INFO - eval point avg class acc: 0.482293
+2021-03-21 18:52:08,889 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.797
+class floor weight: 0.200, IoU: 0.974
+class wall weight: 0.167, IoU: 0.665
+class beam weight: 0.282, IoU: 0.001
+class column weight: 0.000, IoU: 0.035
+class window weight: 0.019, IoU: 0.385
+class door weight: 0.034, IoU: 0.061
+class table weight: 0.030, IoU: 0.505
+class chair weight: 0.038, IoU: 0.429
+class sofa weight: 0.019, IoU: 0.057
+class bookcase weight: 0.003, IoU: 0.342
+class board weight: 0.107, IoU: 0.304
+class clutter weight: 0.012, IoU: 0.293
+
+2021-03-21 18:52:08,890 - Model - INFO - Eval mean loss: 1.353670
+2021-03-21 18:52:08,890 - Model - INFO - Eval accuracy: 0.735467
+2021-03-21 18:52:08,890 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 18:52:08,890 - Model - INFO - **** Epoch 21 (21/128) ****
+2021-03-21 18:52:08,890 - Model - INFO - Learning rate:0.000490
+2021-03-21 18:59:08,206 - Model - INFO - Training mean loss: 0.254339
+2021-03-21 18:59:08,207 - Model - INFO - Training accuracy: 0.915563
+2021-03-21 18:59:08,207 - Model - INFO - Save model...
+2021-03-21 18:59:08,207 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-21 18:59:08,397 - Model - INFO - Saving model....
+2021-03-21 18:59:08,932 - Model - INFO - ---- EPOCH 021 EVALUATION ----
+2021-03-21 19:00:51,111 - Model - INFO - eval mean loss: 1.214007
+2021-03-21 19:00:51,111 - Model - INFO - eval point avg class IoU: 0.409424
+2021-03-21 19:00:51,111 - Model - INFO - eval point accuracy: 0.777035
+2021-03-21 19:00:51,112 - Model - INFO - eval point avg class acc: 0.512791
+2021-03-21 19:00:51,112 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.855
+class floor weight: 0.201, IoU: 0.961
+class wall weight: 0.167, IoU: 0.714
+class beam weight: 0.278, IoU: 0.004
+class column weight: 0.000, IoU: 0.040
+class window weight: 0.017, IoU: 0.513
+class door weight: 0.033, IoU: 0.076
+class table weight: 0.031, IoU: 0.516
+class chair weight: 0.039, IoU: 0.404
+class sofa weight: 0.018, IoU: 0.056
+class bookcase weight: 0.003, IoU: 0.477
+class board weight: 0.110, IoU: 0.394
+class clutter weight: 0.012, IoU: 0.312
+
+2021-03-21 19:00:51,113 - Model - INFO - Eval mean loss: 1.214007
+2021-03-21 19:00:51,113 - Model - INFO - Eval accuracy: 0.777035
+2021-03-21 19:00:51,113 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 19:00:51,113 - Model - INFO - **** Epoch 22 (22/128) ****
+2021-03-21 19:00:51,114 - Model - INFO - Learning rate:0.000490
+2021-03-21 19:07:55,290 - Model - INFO - Training mean loss: 0.252234
+2021-03-21 19:07:55,290 - Model - INFO - Training accuracy: 0.916052
+2021-03-21 19:07:55,733 - Model - INFO - ---- EPOCH 022 EVALUATION ----
+2021-03-21 19:09:47,741 - Model - INFO - eval mean loss: 1.086110
+2021-03-21 19:09:47,741 - Model - INFO - eval point avg class IoU: 0.402692
+2021-03-21 19:09:47,741 - Model - INFO - eval point accuracy: 0.771565
+2021-03-21 19:09:47,742 - Model - INFO - eval point avg class acc: 0.499962
+2021-03-21 19:09:47,742 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.846
+class floor weight: 0.198, IoU: 0.982
+class wall weight: 0.165, IoU: 0.696
+class beam weight: 0.279, IoU: 0.003
+class column weight: 0.000, IoU: 0.068
+class window weight: 0.020, IoU: 0.582
+class door weight: 0.035, IoU: 0.026
+class table weight: 0.030, IoU: 0.510
+class chair weight: 0.038, IoU: 0.283
+class sofa weight: 0.018, IoU: 0.042
+class bookcase weight: 0.003, IoU: 0.494
+class board weight: 0.110, IoU: 0.379
+class clutter weight: 0.011, IoU: 0.325
+
+2021-03-21 19:09:47,742 - Model - INFO - Eval mean loss: 1.086110
+2021-03-21 19:09:47,742 - Model - INFO - Eval accuracy: 0.771565
+2021-03-21 19:09:47,742 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 19:09:47,742 - Model - INFO - **** Epoch 23 (23/128) ****
+2021-03-21 19:09:47,742 - Model - INFO - Learning rate:0.000490
+2021-03-21 19:16:53,763 - Model - INFO - Training mean loss: 0.243796
+2021-03-21 19:16:53,763 - Model - INFO - Training accuracy: 0.918628
+2021-03-21 19:16:54,165 - Model - INFO - ---- EPOCH 023 EVALUATION ----
+2021-03-21 19:18:45,519 - Model - INFO - eval mean loss: 1.042610
+2021-03-21 19:18:45,519 - Model - INFO - eval point avg class IoU: 0.409798
+2021-03-21 19:18:45,519 - Model - INFO - eval point accuracy: 0.764501
+2021-03-21 19:18:45,520 - Model - INFO - eval point avg class acc: 0.515071
+2021-03-21 19:18:45,520 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.829
+class floor weight: 0.202, IoU: 0.975
+class wall weight: 0.167, IoU: 0.698
+class beam weight: 0.279, IoU: 0.002
+class column weight: 0.000, IoU: 0.074
+class window weight: 0.017, IoU: 0.536
+class door weight: 0.035, IoU: 0.115
+class table weight: 0.029, IoU: 0.545
+class chair weight: 0.038, IoU: 0.416
+class sofa weight: 0.020, IoU: 0.035
+class bookcase weight: 0.003, IoU: 0.473
+class board weight: 0.109, IoU: 0.312
+class clutter weight: 0.011, IoU: 0.317
+
+2021-03-21 19:18:45,520 - Model - INFO - Eval mean loss: 1.042610
+2021-03-21 19:18:45,520 - Model - INFO - Eval accuracy: 0.764501
+2021-03-21 19:18:45,520 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 19:18:45,520 - Model - INFO - **** Epoch 24 (24/128) ****
+2021-03-21 19:18:45,520 - Model - INFO - Learning rate:0.000490
+2021-03-21 19:25:47,796 - Model - INFO - Training mean loss: 0.243416
+2021-03-21 19:25:47,796 - Model - INFO - Training accuracy: 0.918939
+2021-03-21 19:25:48,231 - Model - INFO - ---- EPOCH 024 EVALUATION ----
+2021-03-21 19:27:33,578 - Model - INFO - eval mean loss: 1.074731
+2021-03-21 19:27:33,578 - Model - INFO - eval point avg class IoU: 0.392449
+2021-03-21 19:27:33,578 - Model - INFO - eval point accuracy: 0.759852
+2021-03-21 19:27:33,578 - Model - INFO - eval point avg class acc: 0.519523
+2021-03-21 19:27:33,578 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.089, IoU: 0.869
+class floor weight: 0.200, IoU: 0.920
+class wall weight: 0.166, IoU: 0.692
+class beam weight: 0.284, IoU: 0.002
+class column weight: 0.000, IoU: 0.071
+class window weight: 0.018, IoU: 0.498
+class door weight: 0.032, IoU: 0.066
+class table weight: 0.029, IoU: 0.489
+class chair weight: 0.039, IoU: 0.333
+class sofa weight: 0.018, IoU: 0.064
+class bookcase weight: 0.003, IoU: 0.494
+class board weight: 0.109, IoU: 0.295
+class clutter weight: 0.012, IoU: 0.308
+
+2021-03-21 19:27:33,580 - Model - INFO - Eval mean loss: 1.074731
+2021-03-21 19:27:33,580 - Model - INFO - Eval accuracy: 0.759852
+2021-03-21 19:27:33,580 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 19:27:33,580 - Model - INFO - **** Epoch 25 (25/128) ****
+2021-03-21 19:27:33,580 - Model - INFO - Learning rate:0.000490
+2021-03-21 19:34:37,307 - Model - INFO - Training mean loss: 0.241468
+2021-03-21 19:34:37,307 - Model - INFO - Training accuracy: 0.919420
+2021-03-21 19:34:37,682 - Model - INFO - ---- EPOCH 025 EVALUATION ----
+2021-03-21 19:36:14,234 - Model - INFO - eval mean loss: 1.172691
+2021-03-21 19:36:14,235 - Model - INFO - eval point avg class IoU: 0.413975
+2021-03-21 19:36:14,235 - Model - INFO - eval point accuracy: 0.764522
+2021-03-21 19:36:14,235 - Model - INFO - eval point avg class acc: 0.520813
+2021-03-21 19:36:14,236 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.874
+class floor weight: 0.200, IoU: 0.968
+class wall weight: 0.167, IoU: 0.698
+class beam weight: 0.279, IoU: 0.002
+class column weight: 0.000, IoU: 0.094
+class window weight: 0.018, IoU: 0.508
+class door weight: 0.033, IoU: 0.090
+class table weight: 0.029, IoU: 0.523
+class chair weight: 0.039, IoU: 0.419
+class sofa weight: 0.018, IoU: 0.110
+class bookcase weight: 0.003, IoU: 0.411
+class board weight: 0.109, IoU: 0.372
+class clutter weight: 0.011, IoU: 0.314
+
+2021-03-21 19:36:14,236 - Model - INFO - Eval mean loss: 1.172691
+2021-03-21 19:36:14,236 - Model - INFO - Eval accuracy: 0.764522
+2021-03-21 19:36:14,237 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 19:36:14,237 - Model - INFO - **** Epoch 26 (26/128) ****
+2021-03-21 19:36:14,237 - Model - INFO - Learning rate:0.000490
+2021-03-21 19:43:17,840 - Model - INFO - Training mean loss: 0.240322
+2021-03-21 19:43:17,840 - Model - INFO - Training accuracy: 0.919926
+2021-03-21 19:43:17,840 - Model - INFO - Save model...
+2021-03-21 19:43:17,841 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-21 19:43:18,279 - Model - INFO - Saving model....
+2021-03-21 19:43:18,751 - Model - INFO - ---- EPOCH 026 EVALUATION ----
+2021-03-21 19:44:52,904 - Model - INFO - eval mean loss: 1.114103
+2021-03-21 19:44:52,904 - Model - INFO - eval point avg class IoU: 0.392453
+2021-03-21 19:44:52,904 - Model - INFO - eval point accuracy: 0.737830
+2021-03-21 19:44:52,905 - Model - INFO - eval point avg class acc: 0.534499
+2021-03-21 19:44:52,905 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.770
+class floor weight: 0.200, IoU: 0.959
+class wall weight: 0.167, IoU: 0.690
+class beam weight: 0.277, IoU: 0.001
+class column weight: 0.000, IoU: 0.057
+class window weight: 0.019, IoU: 0.498
+class door weight: 0.033, IoU: 0.139
+class table weight: 0.031, IoU: 0.501
+class chair weight: 0.039, IoU: 0.337
+class sofa weight: 0.019, IoU: 0.119
+class bookcase weight: 0.003, IoU: 0.510
+class board weight: 0.109, IoU: 0.265
+class clutter weight: 0.012, IoU: 0.256
+
+2021-03-21 19:44:52,906 - Model - INFO - Eval mean loss: 1.114103
+2021-03-21 19:44:52,906 - Model - INFO - Eval accuracy: 0.737830
+2021-03-21 19:44:52,906 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 19:44:52,906 - Model - INFO - **** Epoch 27 (27/128) ****
+2021-03-21 19:44:52,906 - Model - INFO - Learning rate:0.000490
+2021-03-21 19:51:57,729 - Model - INFO - Training mean loss: 0.240382
+2021-03-21 19:51:57,729 - Model - INFO - Training accuracy: 0.920165
+2021-03-21 19:51:58,273 - Model - INFO - ---- EPOCH 027 EVALUATION ----
+2021-03-21 19:53:32,661 - Model - INFO - eval mean loss: 1.203242
+2021-03-21 19:53:32,661 - Model - INFO - eval point avg class IoU: 0.398363
+2021-03-21 19:53:32,661 - Model - INFO - eval point accuracy: 0.759885
+2021-03-21 19:53:32,661 - Model - INFO - eval point avg class acc: 0.519668
+2021-03-21 19:53:32,662 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.881
+class floor weight: 0.199, IoU: 0.968
+class wall weight: 0.166, IoU: 0.691
+class beam weight: 0.280, IoU: 0.003
+class column weight: 0.000, IoU: 0.056
+class window weight: 0.019, IoU: 0.509
+class door weight: 0.033, IoU: 0.057
+class table weight: 0.032, IoU: 0.453
+class chair weight: 0.038, IoU: 0.345
+class sofa weight: 0.018, IoU: 0.131
+class bookcase weight: 0.003, IoU: 0.481
+class board weight: 0.110, IoU: 0.290
+class clutter weight: 0.011, IoU: 0.313
+
+2021-03-21 19:53:32,662 - Model - INFO - Eval mean loss: 1.203242
+2021-03-21 19:53:32,662 - Model - INFO - Eval accuracy: 0.759885
+2021-03-21 19:53:32,662 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 19:53:32,662 - Model - INFO - **** Epoch 28 (28/128) ****
+2021-03-21 19:53:32,662 - Model - INFO - Learning rate:0.000490
+2021-03-21 20:00:35,992 - Model - INFO - Training mean loss: 0.237837
+2021-03-21 20:00:35,993 - Model - INFO - Training accuracy: 0.920797
+2021-03-21 20:00:36,457 - Model - INFO - ---- EPOCH 028 EVALUATION ----
+2021-03-21 20:02:19,427 - Model - INFO - eval mean loss: 1.189060
+2021-03-21 20:02:19,427 - Model - INFO - eval point avg class IoU: 0.394817
+2021-03-21 20:02:19,428 - Model - INFO - eval point accuracy: 0.770836
+2021-03-21 20:02:19,428 - Model - INFO - eval point avg class acc: 0.514605
+2021-03-21 20:02:19,428 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.877
+class floor weight: 0.202, IoU: 0.966
+class wall weight: 0.167, IoU: 0.697
+class beam weight: 0.280, IoU: 0.001
+class column weight: 0.000, IoU: 0.057
+class window weight: 0.017, IoU: 0.526
+class door weight: 0.034, IoU: 0.052
+class table weight: 0.030, IoU: 0.517
+class chair weight: 0.038, IoU: 0.348
+class sofa weight: 0.019, IoU: 0.060
+class bookcase weight: 0.002, IoU: 0.464
+class board weight: 0.108, IoU: 0.245
+class clutter weight: 0.012, IoU: 0.323
+
+2021-03-21 20:02:19,428 - Model - INFO - Eval mean loss: 1.189060
+2021-03-21 20:02:19,428 - Model - INFO - Eval accuracy: 0.770836
+2021-03-21 20:02:19,429 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 20:02:19,429 - Model - INFO - **** Epoch 29 (29/128) ****
+2021-03-21 20:02:19,429 - Model - INFO - Learning rate:0.000490
+2021-03-21 20:09:23,422 - Model - INFO - Training mean loss: 0.236513
+2021-03-21 20:09:23,423 - Model - INFO - Training accuracy: 0.921148
+2021-03-21 20:09:23,890 - Model - INFO - ---- EPOCH 029 EVALUATION ----
+2021-03-21 20:11:09,174 - Model - INFO - eval mean loss: 1.287402
+2021-03-21 20:11:09,174 - Model - INFO - eval point avg class IoU: 0.390118
+2021-03-21 20:11:09,174 - Model - INFO - eval point accuracy: 0.741568
+2021-03-21 20:11:09,175 - Model - INFO - eval point avg class acc: 0.515558
+2021-03-21 20:11:09,175 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.848
+class floor weight: 0.199, IoU: 0.947
+class wall weight: 0.168, IoU: 0.684
+class beam weight: 0.280, IoU: 0.002
+class column weight: 0.000, IoU: 0.042
+class window weight: 0.017, IoU: 0.548
+class door weight: 0.033, IoU: 0.078
+class table weight: 0.030, IoU: 0.520
+class chair weight: 0.039, IoU: 0.441
+class sofa weight: 0.020, IoU: 0.048
+class bookcase weight: 0.003, IoU: 0.410
+class board weight: 0.110, IoU: 0.226
+class clutter weight: 0.011, IoU: 0.279
+
+2021-03-21 20:11:09,175 - Model - INFO - Eval mean loss: 1.287402
+2021-03-21 20:11:09,175 - Model - INFO - Eval accuracy: 0.741568
+2021-03-21 20:11:09,175 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 20:11:09,175 - Model - INFO - **** Epoch 30 (30/128) ****
+2021-03-21 20:11:09,176 - Model - INFO - Learning rate:0.000490
+2021-03-21 20:18:14,093 - Model - INFO - Training mean loss: 0.236057
+2021-03-21 20:18:14,094 - Model - INFO - Training accuracy: 0.921445
+2021-03-21 20:18:14,492 - Model - INFO - ---- EPOCH 030 EVALUATION ----
+2021-03-21 20:20:08,694 - Model - INFO - eval mean loss: 1.176421
+2021-03-21 20:20:08,695 - Model - INFO - eval point avg class IoU: 0.389944
+2021-03-21 20:20:08,695 - Model - INFO - eval point accuracy: 0.745655
+2021-03-21 20:20:08,695 - Model - INFO - eval point avg class acc: 0.518320
+2021-03-21 20:20:08,695 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.830
+class floor weight: 0.200, IoU: 0.973
+class wall weight: 0.167, IoU: 0.653
+class beam weight: 0.279, IoU: 0.000
+class column weight: 0.000, IoU: 0.090
+class window weight: 0.019, IoU: 0.403
+class door weight: 0.032, IoU: 0.068
+class table weight: 0.030, IoU: 0.511
+class chair weight: 0.038, IoU: 0.335
+class sofa weight: 0.018, IoU: 0.128
+class bookcase weight: 0.003, IoU: 0.453
+class board weight: 0.109, IoU: 0.307
+class clutter weight: 0.012, IoU: 0.316
+
+2021-03-21 20:20:08,695 - Model - INFO - Eval mean loss: 1.176421
+2021-03-21 20:20:08,696 - Model - INFO - Eval accuracy: 0.745655
+2021-03-21 20:20:08,696 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 20:20:08,696 - Model - INFO - **** Epoch 31 (31/128) ****
+2021-03-21 20:20:08,696 - Model - INFO - Learning rate:0.000343
+2021-03-21 20:27:17,073 - Model - INFO - Training mean loss: 0.215839
+2021-03-21 20:27:17,074 - Model - INFO - Training accuracy: 0.927641
+2021-03-21 20:27:17,074 - Model - INFO - Save model...
+2021-03-21 20:27:17,074 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-21 20:27:17,237 - Model - INFO - Saving model....
+2021-03-21 20:27:17,680 - Model - INFO - ---- EPOCH 031 EVALUATION ----
+2021-03-21 20:29:04,210 - Model - INFO - eval mean loss: 1.270073
+2021-03-21 20:29:04,210 - Model - INFO - eval point avg class IoU: 0.409871
+2021-03-21 20:29:04,210 - Model - INFO - eval point accuracy: 0.761716
+2021-03-21 20:29:04,210 - Model - INFO - eval point avg class acc: 0.526399
+2021-03-21 20:29:04,211 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.852
+class floor weight: 0.199, IoU: 0.968
+class wall weight: 0.166, IoU: 0.678
+class beam weight: 0.281, IoU: 0.004
+class column weight: 0.000, IoU: 0.045
+class window weight: 0.017, IoU: 0.517
+class door weight: 0.034, IoU: 0.096
+class table weight: 0.030, IoU: 0.532
+class chair weight: 0.039, IoU: 0.443
+class sofa weight: 0.018, IoU: 0.047
+class bookcase weight: 0.003, IoU: 0.443
+class board weight: 0.109, IoU: 0.391
+class clutter weight: 0.012, IoU: 0.312
+
+2021-03-21 20:29:04,211 - Model - INFO - Eval mean loss: 1.270073
+2021-03-21 20:29:04,211 - Model - INFO - Eval accuracy: 0.761716
+2021-03-21 20:29:04,211 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 20:29:04,211 - Model - INFO - **** Epoch 32 (32/128) ****
+2021-03-21 20:29:04,211 - Model - INFO - Learning rate:0.000343
+2021-03-21 20:36:08,701 - Model - INFO - Training mean loss: 0.212278
+2021-03-21 20:36:08,701 - Model - INFO - Training accuracy: 0.929099
+2021-03-21 20:36:09,172 - Model - INFO - ---- EPOCH 032 EVALUATION ----
+2021-03-21 20:37:56,464 - Model - INFO - eval mean loss: 1.116414
+2021-03-21 20:37:56,464 - Model - INFO - eval point avg class IoU: 0.409128
+2021-03-21 20:37:56,464 - Model - INFO - eval point accuracy: 0.765508
+2021-03-21 20:37:56,464 - Model - INFO - eval point avg class acc: 0.538862
+2021-03-21 20:37:56,465 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.820
+class floor weight: 0.203, IoU: 0.956
+class wall weight: 0.170, IoU: 0.694
+class beam weight: 0.278, IoU: 0.001
+class column weight: 0.000, IoU: 0.079
+class window weight: 0.018, IoU: 0.516
+class door weight: 0.032, IoU: 0.083
+class table weight: 0.029, IoU: 0.521
+class chair weight: 0.039, IoU: 0.431
+class sofa weight: 0.019, IoU: 0.101
+class bookcase weight: 0.003, IoU: 0.522
+class board weight: 0.106, IoU: 0.308
+class clutter weight: 0.012, IoU: 0.286
+
+2021-03-21 20:37:56,466 - Model - INFO - Eval mean loss: 1.116414
+2021-03-21 20:37:56,466 - Model - INFO - Eval accuracy: 0.765508
+2021-03-21 20:37:56,466 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 20:37:56,467 - Model - INFO - **** Epoch 33 (33/128) ****
+2021-03-21 20:37:56,467 - Model - INFO - Learning rate:0.000343
+2021-03-21 20:45:10,035 - Model - INFO - Training mean loss: 0.211234
+2021-03-21 20:45:10,036 - Model - INFO - Training accuracy: 0.929069
+2021-03-21 20:45:10,489 - Model - INFO - ---- EPOCH 033 EVALUATION ----
+2021-03-21 20:46:57,384 - Model - INFO - eval mean loss: 1.306747
+2021-03-21 20:46:57,384 - Model - INFO - eval point avg class IoU: 0.372982
+2021-03-21 20:46:57,384 - Model - INFO - eval point accuracy: 0.722214
+2021-03-21 20:46:57,384 - Model - INFO - eval point avg class acc: 0.526592
+2021-03-21 20:46:57,385 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.093, IoU: 0.862
+class floor weight: 0.202, IoU: 0.931
+class wall weight: 0.167, IoU: 0.643
+class beam weight: 0.277, IoU: 0.003
+class column weight: 0.000, IoU: 0.064
+class window weight: 0.017, IoU: 0.468
+class door weight: 0.036, IoU: 0.115
+class table weight: 0.029, IoU: 0.461
+class chair weight: 0.039, IoU: 0.327
+class sofa weight: 0.019, IoU: 0.030
+class bookcase weight: 0.003, IoU: 0.431
+class board weight: 0.107, IoU: 0.228
+class clutter weight: 0.012, IoU: 0.286
+
+2021-03-21 20:46:57,385 - Model - INFO - Eval mean loss: 1.306747
+2021-03-21 20:46:57,386 - Model - INFO - Eval accuracy: 0.722214
+2021-03-21 20:46:57,386 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 20:46:57,386 - Model - INFO - **** Epoch 34 (34/128) ****
+2021-03-21 20:46:57,386 - Model - INFO - Learning rate:0.000343
+2021-03-21 20:54:06,891 - Model - INFO - Training mean loss: 0.208546
+2021-03-21 20:54:06,892 - Model - INFO - Training accuracy: 0.929984
+2021-03-21 20:54:07,370 - Model - INFO - ---- EPOCH 034 EVALUATION ----
+2021-03-21 20:55:57,408 - Model - INFO - eval mean loss: 1.211445
+2021-03-21 20:55:57,408 - Model - INFO - eval point avg class IoU: 0.404436
+2021-03-21 20:55:57,408 - Model - INFO - eval point accuracy: 0.770367
+2021-03-21 20:55:57,408 - Model - INFO - eval point avg class acc: 0.498519
+2021-03-21 20:55:57,408 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.880
+class floor weight: 0.201, IoU: 0.975
+class wall weight: 0.168, IoU: 0.687
+class beam weight: 0.277, IoU: 0.001
+class column weight: 0.000, IoU: 0.069
+class window weight: 0.019, IoU: 0.540
+class door weight: 0.032, IoU: 0.051
+class table weight: 0.031, IoU: 0.527
+class chair weight: 0.039, IoU: 0.379
+class sofa weight: 0.018, IoU: 0.061
+class bookcase weight: 0.003, IoU: 0.436
+class board weight: 0.108, IoU: 0.317
+class clutter weight: 0.011, IoU: 0.333
+
+2021-03-21 20:55:57,410 - Model - INFO - Eval mean loss: 1.211445
+2021-03-21 20:55:57,410 - Model - INFO - Eval accuracy: 0.770367
+2021-03-21 20:55:57,410 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 20:55:57,410 - Model - INFO - **** Epoch 35 (35/128) ****
+2021-03-21 20:55:57,410 - Model - INFO - Learning rate:0.000343
+2021-03-21 21:02:59,816 - Model - INFO - Training mean loss: 0.208965
+2021-03-21 21:02:59,817 - Model - INFO - Training accuracy: 0.929793
+2021-03-21 21:03:00,283 - Model - INFO - ---- EPOCH 035 EVALUATION ----
+2021-03-21 21:04:34,736 - Model - INFO - eval mean loss: 1.244278
+2021-03-21 21:04:34,736 - Model - INFO - eval point avg class IoU: 0.396191
+2021-03-21 21:04:34,736 - Model - INFO - eval point accuracy: 0.758395
+2021-03-21 21:04:34,737 - Model - INFO - eval point avg class acc: 0.528610
+2021-03-21 21:04:34,737 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.874
+class floor weight: 0.199, IoU: 0.935
+class wall weight: 0.165, IoU: 0.696
+class beam weight: 0.285, IoU: 0.003
+class column weight: 0.000, IoU: 0.085
+class window weight: 0.018, IoU: 0.484
+class door weight: 0.033, IoU: 0.077
+class table weight: 0.030, IoU: 0.497
+class chair weight: 0.038, IoU: 0.370
+class sofa weight: 0.019, IoU: 0.101
+class bookcase weight: 0.003, IoU: 0.462
+class board weight: 0.108, IoU: 0.254
+class clutter weight: 0.012, IoU: 0.313
+
+2021-03-21 21:04:34,737 - Model - INFO - Eval mean loss: 1.244278
+2021-03-21 21:04:34,737 - Model - INFO - Eval accuracy: 0.758395
+2021-03-21 21:04:34,737 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 21:04:34,737 - Model - INFO - **** Epoch 36 (36/128) ****
+2021-03-21 21:04:34,737 - Model - INFO - Learning rate:0.000343
+2021-03-21 21:11:37,006 - Model - INFO - Training mean loss: 0.207099
+2021-03-21 21:11:37,007 - Model - INFO - Training accuracy: 0.930556
+2021-03-21 21:11:37,007 - Model - INFO - Save model...
+2021-03-21 21:11:37,007 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-21 21:11:37,145 - Model - INFO - Saving model....
+2021-03-21 21:11:37,608 - Model - INFO - ---- EPOCH 036 EVALUATION ----
+2021-03-21 21:13:19,816 - Model - INFO - eval mean loss: 1.181748
+2021-03-21 21:13:19,816 - Model - INFO - eval point avg class IoU: 0.409459
+2021-03-21 21:13:19,817 - Model - INFO - eval point accuracy: 0.751491
+2021-03-21 21:13:19,817 - Model - INFO - eval point avg class acc: 0.554133
+2021-03-21 21:13:19,817 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.862
+class floor weight: 0.200, IoU: 0.971
+class wall weight: 0.167, IoU: 0.665
+class beam weight: 0.279, IoU: 0.000
+class column weight: 0.000, IoU: 0.075
+class window weight: 0.017, IoU: 0.515
+class door weight: 0.033, IoU: 0.165
+class table weight: 0.030, IoU: 0.550
+class chair weight: 0.038, IoU: 0.370
+class sofa weight: 0.018, IoU: 0.103
+class bookcase weight: 0.003, IoU: 0.442
+class board weight: 0.110, IoU: 0.297
+class clutter weight: 0.012, IoU: 0.308
+
+2021-03-21 21:13:19,817 - Model - INFO - Eval mean loss: 1.181748
+2021-03-21 21:13:19,817 - Model - INFO - Eval accuracy: 0.751491
+2021-03-21 21:13:19,817 - Model - INFO - Best mIoU: 0.416496
+2021-03-21 21:13:19,817 - Model - INFO - **** Epoch 37 (37/128) ****
+2021-03-21 21:13:19,817 - Model - INFO - Learning rate:0.000343
+2021-03-21 21:20:20,638 - Model - INFO - Training mean loss: 0.206496
+2021-03-21 21:20:20,638 - Model - INFO - Training accuracy: 0.930663
+2021-03-21 21:20:21,081 - Model - INFO - ---- EPOCH 037 EVALUATION ----
+2021-03-21 21:21:58,539 - Model - INFO - eval mean loss: 1.169024
+2021-03-21 21:21:58,539 - Model - INFO - eval point avg class IoU: 0.419681
+2021-03-21 21:21:58,539 - Model - INFO - eval point accuracy: 0.774543
+2021-03-21 21:21:58,540 - Model - INFO - eval point avg class acc: 0.539303
+2021-03-21 21:21:58,540 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.866
+class floor weight: 0.199, IoU: 0.975
+class wall weight: 0.167, IoU: 0.704
+class beam weight: 0.279, IoU: 0.002
+class column weight: 0.000, IoU: 0.063
+class window weight: 0.020, IoU: 0.556
+class door weight: 0.034, IoU: 0.209
+class table weight: 0.030, IoU: 0.528
+class chair weight: 0.039, IoU: 0.427
+class sofa weight: 0.018, IoU: 0.031
+class bookcase weight: 0.003, IoU: 0.420
+class board weight: 0.109, IoU: 0.345
+class clutter weight: 0.011, IoU: 0.331
+
+2021-03-21 21:21:58,540 - Model - INFO - Eval mean loss: 1.169024
+2021-03-21 21:21:58,540 - Model - INFO - Eval accuracy: 0.774543
+2021-03-21 21:21:58,540 - Model - INFO - Save model...
+2021-03-21 21:21:58,540 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/best_model.pth
+2021-03-21 21:21:58,687 - Model - INFO - Saving model....
+2021-03-21 21:21:58,687 - Model - INFO - Best mIoU: 0.419681
+2021-03-21 21:21:58,688 - Model - INFO - **** Epoch 38 (38/128) ****
+2021-03-21 21:21:58,688 - Model - INFO - Learning rate:0.000343
+2021-03-21 21:29:05,336 - Model - INFO - Training mean loss: 0.206943
+2021-03-21 21:29:05,337 - Model - INFO - Training accuracy: 0.930872
+2021-03-21 21:29:05,782 - Model - INFO - ---- EPOCH 038 EVALUATION ----
+2021-03-21 21:30:40,851 - Model - INFO - eval mean loss: 1.193345
+2021-03-21 21:30:40,852 - Model - INFO - eval point avg class IoU: 0.390689
+2021-03-21 21:30:40,852 - Model - INFO - eval point accuracy: 0.760836
+2021-03-21 21:30:40,852 - Model - INFO - eval point avg class acc: 0.504468
+2021-03-21 21:30:40,852 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.868
+class floor weight: 0.202, IoU: 0.976
+class wall weight: 0.168, IoU: 0.685
+class beam weight: 0.280, IoU: 0.001
+class column weight: 0.000, IoU: 0.042
+class window weight: 0.020, IoU: 0.497
+class door weight: 0.031, IoU: 0.074
+class table weight: 0.029, IoU: 0.457
+class chair weight: 0.039, IoU: 0.393
+class sofa weight: 0.019, IoU: 0.036
+class bookcase weight: 0.003, IoU: 0.497
+class board weight: 0.108, IoU: 0.247
+class clutter weight: 0.011, IoU: 0.305
+
+2021-03-21 21:30:40,853 - Model - INFO - Eval mean loss: 1.193345
+2021-03-21 21:30:40,853 - Model - INFO - Eval accuracy: 0.760836
+2021-03-21 21:30:40,853 - Model - INFO - Best mIoU: 0.419681
+2021-03-21 21:30:40,853 - Model - INFO - **** Epoch 39 (39/128) ****
+2021-03-21 21:30:40,853 - Model - INFO - Learning rate:0.000343
+2021-03-21 21:37:43,803 - Model - INFO - Training mean loss: 0.203373
+2021-03-21 21:37:43,803 - Model - INFO - Training accuracy: 0.931680
+2021-03-21 21:37:44,223 - Model - INFO - ---- EPOCH 039 EVALUATION ----
+2021-03-21 21:39:27,814 - Model - INFO - eval mean loss: 1.229800
+2021-03-21 21:39:27,814 - Model - INFO - eval point avg class IoU: 0.408667
+2021-03-21 21:39:27,815 - Model - INFO - eval point accuracy: 0.779255
+2021-03-21 21:39:27,815 - Model - INFO - eval point avg class acc: 0.518439
+2021-03-21 21:39:27,815 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.870
+class floor weight: 0.201, IoU: 0.974
+class wall weight: 0.167, IoU: 0.708
+class beam weight: 0.279, IoU: 0.002
+class column weight: 0.000, IoU: 0.052
+class window weight: 0.019, IoU: 0.496
+class door weight: 0.032, IoU: 0.058
+class table weight: 0.029, IoU: 0.529
+class chair weight: 0.039, IoU: 0.398
+class sofa weight: 0.019, IoU: 0.037
+class bookcase weight: 0.003, IoU: 0.515
+class board weight: 0.109, IoU: 0.373
+class clutter weight: 0.012, IoU: 0.302
+
+2021-03-21 21:39:27,815 - Model - INFO - Eval mean loss: 1.229800
+2021-03-21 21:39:27,815 - Model - INFO - Eval accuracy: 0.779255
+2021-03-21 21:39:27,816 - Model - INFO - Best mIoU: 0.419681
+2021-03-21 21:39:27,816 - Model - INFO - **** Epoch 40 (40/128) ****
+2021-03-21 21:39:27,816 - Model - INFO - Learning rate:0.000343
+2021-03-21 21:46:46,123 - Model - INFO - Training mean loss: 0.205424
+2021-03-21 21:46:46,123 - Model - INFO - Training accuracy: 0.930961
+2021-03-21 21:46:46,597 - Model - INFO - ---- EPOCH 040 EVALUATION ----
+2021-03-21 21:48:29,410 - Model - INFO - eval mean loss: 1.182627
+2021-03-21 21:48:29,411 - Model - INFO - eval point avg class IoU: 0.413356
+2021-03-21 21:48:29,411 - Model - INFO - eval point accuracy: 0.766765
+2021-03-21 21:48:29,411 - Model - INFO - eval point avg class acc: 0.538446
+2021-03-21 21:48:29,411 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.849
+class floor weight: 0.202, IoU: 0.977
+class wall weight: 0.169, IoU: 0.704
+class beam weight: 0.276, IoU: 0.002
+class column weight: 0.000, IoU: 0.083
+class window weight: 0.019, IoU: 0.502
+class door weight: 0.033, IoU: 0.162
+class table weight: 0.029, IoU: 0.544
+class chair weight: 0.039, IoU: 0.366
+class sofa weight: 0.019, IoU: 0.099
+class bookcase weight: 0.003, IoU: 0.408
+class board weight: 0.108, IoU: 0.357
+class clutter weight: 0.011, IoU: 0.320
+
+2021-03-21 21:48:29,412 - Model - INFO - Eval mean loss: 1.182627
+2021-03-21 21:48:29,412 - Model - INFO - Eval accuracy: 0.766765
+2021-03-21 21:48:29,412 - Model - INFO - Best mIoU: 0.419681
+2021-03-21 21:48:29,412 - Model - INFO - **** Epoch 41 (41/128) ****
+2021-03-21 21:48:29,412 - Model - INFO - Learning rate:0.000240
+2021-03-21 21:55:49,244 - Model - INFO - Training mean loss: 0.191900
+2021-03-21 21:55:49,244 - Model - INFO - Training accuracy: 0.935573
+2021-03-21 21:55:49,244 - Model - INFO - Save model...
+2021-03-21 21:55:49,244 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-21 21:55:49,455 - Model - INFO - Saving model....
+2021-03-21 21:55:49,878 - Model - INFO - ---- EPOCH 041 EVALUATION ----
+2021-03-21 21:57:42,586 - Model - INFO - eval mean loss: 1.313206
+2021-03-21 21:57:42,586 - Model - INFO - eval point avg class IoU: 0.412765
+2021-03-21 21:57:42,587 - Model - INFO - eval point accuracy: 0.766206
+2021-03-21 21:57:42,587 - Model - INFO - eval point avg class acc: 0.523212
+2021-03-21 21:57:42,587 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.842
+class floor weight: 0.200, IoU: 0.920
+class wall weight: 0.167, IoU: 0.702
+class beam weight: 0.279, IoU: 0.001
+class column weight: 0.000, IoU: 0.065
+class window weight: 0.018, IoU: 0.518
+class door weight: 0.034, IoU: 0.131
+class table weight: 0.031, IoU: 0.534
+class chair weight: 0.039, IoU: 0.401
+class sofa weight: 0.019, IoU: 0.095
+class bookcase weight: 0.002, IoU: 0.445
+class board weight: 0.106, IoU: 0.390
+class clutter weight: 0.012, IoU: 0.323
+
+2021-03-21 21:57:42,588 - Model - INFO - Eval mean loss: 1.313206
+2021-03-21 21:57:42,588 - Model - INFO - Eval accuracy: 0.766206
+2021-03-21 21:57:42,588 - Model - INFO - Best mIoU: 0.419681
+2021-03-21 21:57:42,589 - Model - INFO - **** Epoch 42 (42/128) ****
+2021-03-21 21:57:42,589 - Model - INFO - Learning rate:0.000240
+2021-03-21 22:04:56,010 - Model - INFO - Training mean loss: 0.189989
+2021-03-21 22:04:56,010 - Model - INFO - Training accuracy: 0.936103
+2021-03-21 22:04:56,495 - Model - INFO - ---- EPOCH 042 EVALUATION ----
+2021-03-21 22:07:01,936 - Model - INFO - eval mean loss: 1.229263
+2021-03-21 22:07:01,936 - Model - INFO - eval point avg class IoU: 0.408018
+2021-03-21 22:07:01,936 - Model - INFO - eval point accuracy: 0.768151
+2021-03-21 22:07:01,937 - Model - INFO - eval point avg class acc: 0.511002
+2021-03-21 22:07:01,937 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.875
+class floor weight: 0.200, IoU: 0.977
+class wall weight: 0.167, IoU: 0.697
+class beam weight: 0.280, IoU: 0.000
+class column weight: 0.000, IoU: 0.057
+class window weight: 0.019, IoU: 0.489
+class door weight: 0.033, IoU: 0.115
+class table weight: 0.030, IoU: 0.492
+class chair weight: 0.038, IoU: 0.421
+class sofa weight: 0.019, IoU: 0.041
+class bookcase weight: 0.003, IoU: 0.464
+class board weight: 0.108, IoU: 0.362
+class clutter weight: 0.012, IoU: 0.313
+
+2021-03-21 22:07:01,937 - Model - INFO - Eval mean loss: 1.229263
+2021-03-21 22:07:01,937 - Model - INFO - Eval accuracy: 0.768151
+2021-03-21 22:07:01,937 - Model - INFO - Best mIoU: 0.419681
+2021-03-21 22:07:01,937 - Model - INFO - **** Epoch 43 (43/128) ****
+2021-03-21 22:07:01,937 - Model - INFO - Learning rate:0.000240
+2021-03-21 22:14:10,516 - Model - INFO - Training mean loss: 0.186245
+2021-03-21 22:14:10,516 - Model - INFO - Training accuracy: 0.937270
+2021-03-21 22:14:10,960 - Model - INFO - ---- EPOCH 043 EVALUATION ----
+2021-03-21 22:16:02,276 - Model - INFO - eval mean loss: 1.129443
+2021-03-21 22:16:02,276 - Model - INFO - eval point avg class IoU: 0.427449
+2021-03-21 22:16:02,276 - Model - INFO - eval point accuracy: 0.766908
+2021-03-21 22:16:02,276 - Model - INFO - eval point avg class acc: 0.544388
+2021-03-21 22:16:02,277 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.093, IoU: 0.859
+class floor weight: 0.201, IoU: 0.975
+class wall weight: 0.169, IoU: 0.686
+class beam weight: 0.276, IoU: 0.002
+class column weight: 0.000, IoU: 0.067
+class window weight: 0.019, IoU: 0.518
+class door weight: 0.032, IoU: 0.282
+class table weight: 0.032, IoU: 0.560
+class chair weight: 0.038, IoU: 0.390
+class sofa weight: 0.019, IoU: 0.027
+class bookcase weight: 0.003, IoU: 0.502
+class board weight: 0.106, IoU: 0.388
+class clutter weight: 0.011, IoU: 0.302
+
+2021-03-21 22:16:02,277 - Model - INFO - Eval mean loss: 1.129443
+2021-03-21 22:16:02,277 - Model - INFO - Eval accuracy: 0.766908
+2021-03-21 22:16:02,277 - Model - INFO - Save model...
+2021-03-21 22:16:02,277 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/best_model.pth
+2021-03-21 22:16:02,430 - Model - INFO - Saving model....
+2021-03-21 22:16:02,430 - Model - INFO - Best mIoU: 0.427449
+2021-03-21 22:16:02,430 - Model - INFO - **** Epoch 44 (44/128) ****
+2021-03-21 22:16:02,430 - Model - INFO - Learning rate:0.000240
+2021-03-21 22:23:09,404 - Model - INFO - Training mean loss: 0.186955
+2021-03-21 22:23:09,404 - Model - INFO - Training accuracy: 0.937080
+2021-03-21 22:23:09,862 - Model - INFO - ---- EPOCH 044 EVALUATION ----
+2021-03-21 22:24:52,522 - Model - INFO - eval mean loss: 1.177946
+2021-03-21 22:24:52,522 - Model - INFO - eval point avg class IoU: 0.408625
+2021-03-21 22:24:52,522 - Model - INFO - eval point accuracy: 0.761867
+2021-03-21 22:24:52,523 - Model - INFO - eval point avg class acc: 0.526136
+2021-03-21 22:24:52,523 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.089, IoU: 0.818
+class floor weight: 0.202, IoU: 0.965
+class wall weight: 0.169, IoU: 0.704
+class beam weight: 0.279, IoU: 0.002
+class column weight: 0.000, IoU: 0.066
+class window weight: 0.018, IoU: 0.506
+class door weight: 0.033, IoU: 0.115
+class table weight: 0.031, IoU: 0.543
+class chair weight: 0.039, IoU: 0.415
+class sofa weight: 0.018, IoU: 0.062
+class bookcase weight: 0.002, IoU: 0.506
+class board weight: 0.108, IoU: 0.309
+class clutter weight: 0.011, IoU: 0.303
+
+2021-03-21 22:24:52,524 - Model - INFO - Eval mean loss: 1.177946
+2021-03-21 22:24:52,524 - Model - INFO - Eval accuracy: 0.761867
+2021-03-21 22:24:52,524 - Model - INFO - Best mIoU: 0.427449
+2021-03-21 22:24:52,524 - Model - INFO - **** Epoch 45 (45/128) ****
+2021-03-21 22:24:52,524 - Model - INFO - Learning rate:0.000240
+2021-03-21 22:31:54,288 - Model - INFO - Training mean loss: 0.186426
+2021-03-21 22:31:54,289 - Model - INFO - Training accuracy: 0.937288
+2021-03-21 22:31:54,773 - Model - INFO - ---- EPOCH 045 EVALUATION ----
+2021-03-21 22:33:31,605 - Model - INFO - eval mean loss: 1.184925
+2021-03-21 22:33:31,605 - Model - INFO - eval point avg class IoU: 0.406702
+2021-03-21 22:33:31,605 - Model - INFO - eval point accuracy: 0.759311
+2021-03-21 22:33:31,606 - Model - INFO - eval point avg class acc: 0.542781
+2021-03-21 22:33:31,606 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.848
+class floor weight: 0.200, IoU: 0.969
+class wall weight: 0.166, IoU: 0.692
+class beam weight: 0.280, IoU: 0.002
+class column weight: 0.000, IoU: 0.071
+class window weight: 0.019, IoU: 0.493
+class door weight: 0.035, IoU: 0.170
+class table weight: 0.029, IoU: 0.511
+class chair weight: 0.039, IoU: 0.347
+class sofa weight: 0.019, IoU: 0.113
+class bookcase weight: 0.003, IoU: 0.466
+class board weight: 0.107, IoU: 0.294
+class clutter weight: 0.012, IoU: 0.314
+
+2021-03-21 22:33:31,606 - Model - INFO - Eval mean loss: 1.184925
+2021-03-21 22:33:31,606 - Model - INFO - Eval accuracy: 0.759311
+2021-03-21 22:33:31,606 - Model - INFO - Best mIoU: 0.427449
+2021-03-21 22:33:31,606 - Model - INFO - **** Epoch 46 (46/128) ****
+2021-03-21 22:33:31,606 - Model - INFO - Learning rate:0.000240
+2021-03-21 22:40:32,055 - Model - INFO - Training mean loss: 0.184488
+2021-03-21 22:40:32,056 - Model - INFO - Training accuracy: 0.937930
+2021-03-21 22:40:32,056 - Model - INFO - Save model...
+2021-03-21 22:40:32,056 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-21 22:40:32,232 - Model - INFO - Saving model....
+2021-03-21 22:40:32,678 - Model - INFO - ---- EPOCH 046 EVALUATION ----
+2021-03-21 22:42:11,072 - Model - INFO - eval mean loss: 1.179590
+2021-03-21 22:42:11,072 - Model - INFO - eval point avg class IoU: 0.423073
+2021-03-21 22:42:11,072 - Model - INFO - eval point accuracy: 0.775935
+2021-03-21 22:42:11,072 - Model - INFO - eval point avg class acc: 0.530816
+2021-03-21 22:42:11,073 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.869
+class floor weight: 0.199, IoU: 0.975
+class wall weight: 0.165, IoU: 0.708
+class beam weight: 0.282, IoU: 0.001
+class column weight: 0.000, IoU: 0.044
+class window weight: 0.018, IoU: 0.508
+class door weight: 0.035, IoU: 0.228
+class table weight: 0.028, IoU: 0.549
+class chair weight: 0.039, IoU: 0.417
+class sofa weight: 0.019, IoU: 0.079
+class bookcase weight: 0.003, IoU: 0.431
+class board weight: 0.107, IoU: 0.369
+class clutter weight: 0.012, IoU: 0.322
+
+2021-03-21 22:42:11,073 - Model - INFO - Eval mean loss: 1.179590
+2021-03-21 22:42:11,073 - Model - INFO - Eval accuracy: 0.775935
+2021-03-21 22:42:11,074 - Model - INFO - Best mIoU: 0.427449
+2021-03-21 22:42:11,074 - Model - INFO - **** Epoch 47 (47/128) ****
+2021-03-21 22:42:11,074 - Model - INFO - Learning rate:0.000240
+2021-03-21 22:49:13,498 - Model - INFO - Training mean loss: 0.184445
+2021-03-21 22:49:13,498 - Model - INFO - Training accuracy: 0.937824
+2021-03-21 22:49:13,858 - Model - INFO - ---- EPOCH 047 EVALUATION ----
+2021-03-21 22:50:54,874 - Model - INFO - eval mean loss: 1.221474
+2021-03-21 22:50:54,874 - Model - INFO - eval point avg class IoU: 0.409497
+2021-03-21 22:50:54,874 - Model - INFO - eval point accuracy: 0.768707
+2021-03-21 22:50:54,874 - Model - INFO - eval point avg class acc: 0.506535
+2021-03-21 22:50:54,874 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.831
+class floor weight: 0.199, IoU: 0.974
+class wall weight: 0.166, IoU: 0.702
+class beam weight: 0.284, IoU: 0.001
+class column weight: 0.000, IoU: 0.084
+class window weight: 0.018, IoU: 0.467
+class door weight: 0.034, IoU: 0.142
+class table weight: 0.030, IoU: 0.540
+class chair weight: 0.039, IoU: 0.397
+class sofa weight: 0.018, IoU: 0.065
+class bookcase weight: 0.002, IoU: 0.454
+class board weight: 0.106, IoU: 0.352
+class clutter weight: 0.012, IoU: 0.315
+
+2021-03-21 22:50:54,875 - Model - INFO - Eval mean loss: 1.221474
+2021-03-21 22:50:54,875 - Model - INFO - Eval accuracy: 0.768707
+2021-03-21 22:50:54,875 - Model - INFO - Best mIoU: 0.427449
+2021-03-21 22:50:54,875 - Model - INFO - **** Epoch 48 (48/128) ****
+2021-03-21 22:50:54,875 - Model - INFO - Learning rate:0.000240
+2021-03-21 22:57:59,611 - Model - INFO - Training mean loss: 0.184066
+2021-03-21 22:57:59,612 - Model - INFO - Training accuracy: 0.938086
+2021-03-21 22:58:00,054 - Model - INFO - ---- EPOCH 048 EVALUATION ----
+2021-03-21 22:59:40,998 - Model - INFO - eval mean loss: 1.302007
+2021-03-21 22:59:40,998 - Model - INFO - eval point avg class IoU: 0.400725
+2021-03-21 22:59:40,998 - Model - INFO - eval point accuracy: 0.756363
+2021-03-21 22:59:40,999 - Model - INFO - eval point avg class acc: 0.516480
+2021-03-21 22:59:40,999 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.864
+class floor weight: 0.199, IoU: 0.961
+class wall weight: 0.166, IoU: 0.682
+class beam weight: 0.281, IoU: 0.002
+class column weight: 0.000, IoU: 0.067
+class window weight: 0.019, IoU: 0.477
+class door weight: 0.034, IoU: 0.112
+class table weight: 0.030, IoU: 0.508
+class chair weight: 0.039, IoU: 0.423
+class sofa weight: 0.018, IoU: 0.097
+class bookcase weight: 0.003, IoU: 0.429
+class board weight: 0.108, IoU: 0.272
+class clutter weight: 0.012, IoU: 0.315
+
+2021-03-21 22:59:41,000 - Model - INFO - Eval mean loss: 1.302007
+2021-03-21 22:59:41,000 - Model - INFO - Eval accuracy: 0.756363
+2021-03-21 22:59:41,000 - Model - INFO - Best mIoU: 0.427449
+2021-03-21 22:59:41,000 - Model - INFO - **** Epoch 49 (49/128) ****
+2021-03-21 22:59:41,000 - Model - INFO - Learning rate:0.000240
+2021-03-21 23:06:44,745 - Model - INFO - Training mean loss: 0.184479
+2021-03-21 23:06:44,745 - Model - INFO - Training accuracy: 0.937883
+2021-03-21 23:06:45,186 - Model - INFO - ---- EPOCH 049 EVALUATION ----
+2021-03-21 23:08:20,775 - Model - INFO - eval mean loss: 1.280587
+2021-03-21 23:08:20,776 - Model - INFO - eval point avg class IoU: 0.404443
+2021-03-21 23:08:20,776 - Model - INFO - eval point accuracy: 0.766548
+2021-03-21 23:08:20,776 - Model - INFO - eval point avg class acc: 0.507672
+2021-03-21 23:08:20,777 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.841
+class floor weight: 0.200, IoU: 0.974
+class wall weight: 0.167, IoU: 0.692
+class beam weight: 0.280, IoU: 0.002
+class column weight: 0.000, IoU: 0.061
+class window weight: 0.018, IoU: 0.516
+class door weight: 0.032, IoU: 0.161
+class table weight: 0.031, IoU: 0.473
+class chair weight: 0.039, IoU: 0.425
+class sofa weight: 0.019, IoU: 0.009
+class bookcase weight: 0.003, IoU: 0.475
+class board weight: 0.108, IoU: 0.319
+class clutter weight: 0.011, IoU: 0.311
+
+2021-03-21 23:08:20,777 - Model - INFO - Eval mean loss: 1.280587
+2021-03-21 23:08:20,777 - Model - INFO - Eval accuracy: 0.766548
+2021-03-21 23:08:20,777 - Model - INFO - Best mIoU: 0.427449
+2021-03-21 23:08:20,778 - Model - INFO - **** Epoch 50 (50/128) ****
+2021-03-21 23:08:20,778 - Model - INFO - Learning rate:0.000240
+2021-03-21 23:15:25,443 - Model - INFO - Training mean loss: 0.182946
+2021-03-21 23:15:25,443 - Model - INFO - Training accuracy: 0.938414
+2021-03-21 23:15:25,835 - Model - INFO - ---- EPOCH 050 EVALUATION ----
+2021-03-21 23:17:04,729 - Model - INFO - eval mean loss: 1.249160
+2021-03-21 23:17:04,729 - Model - INFO - eval point avg class IoU: 0.420928
+2021-03-21 23:17:04,729 - Model - INFO - eval point accuracy: 0.768273
+2021-03-21 23:17:04,730 - Model - INFO - eval point avg class acc: 0.519307
+2021-03-21 23:17:04,730 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.858
+class floor weight: 0.202, IoU: 0.973
+class wall weight: 0.168, IoU: 0.687
+class beam weight: 0.277, IoU: 0.001
+class column weight: 0.000, IoU: 0.108
+class window weight: 0.018, IoU: 0.499
+class door weight: 0.032, IoU: 0.091
+class table weight: 0.031, IoU: 0.559
+class chair weight: 0.039, IoU: 0.428
+class sofa weight: 0.019, IoU: 0.093
+class bookcase weight: 0.003, IoU: 0.457
+class board weight: 0.109, IoU: 0.395
+class clutter weight: 0.012, IoU: 0.324
+
+2021-03-21 23:17:04,730 - Model - INFO - Eval mean loss: 1.249160
+2021-03-21 23:17:04,730 - Model - INFO - Eval accuracy: 0.768273
+2021-03-21 23:17:04,730 - Model - INFO - Best mIoU: 0.427449
+2021-03-21 23:17:04,730 - Model - INFO - **** Epoch 51 (51/128) ****
+2021-03-21 23:17:04,730 - Model - INFO - Learning rate:0.000168
+2021-03-21 23:24:06,179 - Model - INFO - Training mean loss: 0.172630
+2021-03-21 23:24:06,180 - Model - INFO - Training accuracy: 0.941684
+2021-03-21 23:24:06,180 - Model - INFO - Save model...
+2021-03-21 23:24:06,180 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-21 23:24:06,327 - Model - INFO - Saving model....
+2021-03-21 23:24:06,759 - Model - INFO - ---- EPOCH 051 EVALUATION ----
+2021-03-21 23:25:41,590 - Model - INFO - eval mean loss: 1.153260
+2021-03-21 23:25:41,590 - Model - INFO - eval point avg class IoU: 0.411659
+2021-03-21 23:25:41,591 - Model - INFO - eval point accuracy: 0.752344
+2021-03-21 23:25:41,591 - Model - INFO - eval point avg class acc: 0.532628
+2021-03-21 23:25:41,591 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.093, IoU: 0.840
+class floor weight: 0.200, IoU: 0.970
+class wall weight: 0.166, IoU: 0.676
+class beam weight: 0.278, IoU: 0.002
+class column weight: 0.000, IoU: 0.097
+class window weight: 0.019, IoU: 0.538
+class door weight: 0.032, IoU: 0.153
+class table weight: 0.031, IoU: 0.542
+class chair weight: 0.038, IoU: 0.401
+class sofa weight: 0.018, IoU: 0.043
+class bookcase weight: 0.003, IoU: 0.476
+class board weight: 0.109, IoU: 0.296
+class clutter weight: 0.012, IoU: 0.318
+
+2021-03-21 23:25:41,592 - Model - INFO - Eval mean loss: 1.153260
+2021-03-21 23:25:41,592 - Model - INFO - Eval accuracy: 0.752344
+2021-03-21 23:25:41,593 - Model - INFO - Best mIoU: 0.427449
+2021-03-21 23:25:41,593 - Model - INFO - **** Epoch 52 (52/128) ****
+2021-03-21 23:25:41,593 - Model - INFO - Learning rate:0.000168
+2021-03-21 23:32:38,992 - Model - INFO - Training mean loss: 0.171264
+2021-03-21 23:32:38,992 - Model - INFO - Training accuracy: 0.942481
+2021-03-21 23:32:39,381 - Model - INFO - ---- EPOCH 052 EVALUATION ----
+2021-03-21 23:34:11,295 - Model - INFO - eval mean loss: 1.177021
+2021-03-21 23:34:11,296 - Model - INFO - eval point avg class IoU: 0.413085
+2021-03-21 23:34:11,296 - Model - INFO - eval point accuracy: 0.760961
+2021-03-21 23:34:11,296 - Model - INFO - eval point avg class acc: 0.531727
+2021-03-21 23:34:11,296 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.848
+class floor weight: 0.203, IoU: 0.951
+class wall weight: 0.169, IoU: 0.678
+class beam weight: 0.275, IoU: 0.002
+class column weight: 0.000, IoU: 0.111
+class window weight: 0.020, IoU: 0.493
+class door weight: 0.033, IoU: 0.131
+class table weight: 0.030, IoU: 0.519
+class chair weight: 0.039, IoU: 0.444
+class sofa weight: 0.020, IoU: 0.047
+class bookcase weight: 0.003, IoU: 0.483
+class board weight: 0.108, IoU: 0.343
+class clutter weight: 0.012, IoU: 0.319
+
+2021-03-21 23:34:11,297 - Model - INFO - Eval mean loss: 1.177021
+2021-03-21 23:34:11,297 - Model - INFO - Eval accuracy: 0.760961
+2021-03-21 23:34:11,297 - Model - INFO - Best mIoU: 0.427449
+2021-03-21 23:34:11,297 - Model - INFO - **** Epoch 53 (53/128) ****
+2021-03-21 23:34:11,297 - Model - INFO - Learning rate:0.000168
+2021-03-21 23:41:09,163 - Model - INFO - Training mean loss: 0.170586
+2021-03-21 23:41:09,163 - Model - INFO - Training accuracy: 0.942486
+2021-03-21 23:41:09,684 - Model - INFO - ---- EPOCH 053 EVALUATION ----
+2021-03-21 23:42:41,790 - Model - INFO - eval mean loss: 1.201750
+2021-03-21 23:42:41,790 - Model - INFO - eval point avg class IoU: 0.420179
+2021-03-21 23:42:41,790 - Model - INFO - eval point accuracy: 0.775488
+2021-03-21 23:42:41,790 - Model - INFO - eval point avg class acc: 0.519761
+2021-03-21 23:42:41,790 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.858
+class floor weight: 0.200, IoU: 0.977
+class wall weight: 0.167, IoU: 0.711
+class beam weight: 0.281, IoU: 0.002
+class column weight: 0.000, IoU: 0.108
+class window weight: 0.018, IoU: 0.518
+class door weight: 0.033, IoU: 0.120
+class table weight: 0.029, IoU: 0.537
+class chair weight: 0.037, IoU: 0.415
+class sofa weight: 0.019, IoU: 0.044
+class bookcase weight: 0.003, IoU: 0.484
+class board weight: 0.108, IoU: 0.368
+class clutter weight: 0.012, IoU: 0.322
+
+2021-03-21 23:42:41,790 - Model - INFO - Eval mean loss: 1.201750
+2021-03-21 23:42:41,790 - Model - INFO - Eval accuracy: 0.775488
+2021-03-21 23:42:41,790 - Model - INFO - Best mIoU: 0.427449
+2021-03-21 23:42:41,791 - Model - INFO - **** Epoch 54 (54/128) ****
+2021-03-21 23:42:41,791 - Model - INFO - Learning rate:0.000168
+2021-03-21 23:49:45,582 - Model - INFO - Training mean loss: 0.169737
+2021-03-21 23:49:45,582 - Model - INFO - Training accuracy: 0.942663
+2021-03-21 23:49:46,066 - Model - INFO - ---- EPOCH 054 EVALUATION ----
+2021-03-21 23:51:23,358 - Model - INFO - eval mean loss: 1.196179
+2021-03-21 23:51:23,358 - Model - INFO - eval point avg class IoU: 0.419435
+2021-03-21 23:51:23,358 - Model - INFO - eval point accuracy: 0.776544
+2021-03-21 23:51:23,359 - Model - INFO - eval point avg class acc: 0.505746
+2021-03-21 23:51:23,359 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.868
+class floor weight: 0.200, IoU: 0.976
+class wall weight: 0.167, IoU: 0.703
+class beam weight: 0.279, IoU: 0.001
+class column weight: 0.000, IoU: 0.070
+class window weight: 0.018, IoU: 0.477
+class door weight: 0.032, IoU: 0.111
+class table weight: 0.031, IoU: 0.548
+class chair weight: 0.040, IoU: 0.398
+class sofa weight: 0.018, IoU: 0.059
+class bookcase weight: 0.003, IoU: 0.492
+class board weight: 0.109, IoU: 0.412
+class clutter weight: 0.012, IoU: 0.340
+
+2021-03-21 23:51:23,359 - Model - INFO - Eval mean loss: 1.196179
+2021-03-21 23:51:23,359 - Model - INFO - Eval accuracy: 0.776544
+2021-03-21 23:51:23,359 - Model - INFO - Best mIoU: 0.427449
+2021-03-21 23:51:23,359 - Model - INFO - **** Epoch 55 (55/128) ****
+2021-03-21 23:51:23,359 - Model - INFO - Learning rate:0.000168
+2021-03-21 23:58:27,940 - Model - INFO - Training mean loss: 0.170451
+2021-03-21 23:58:27,941 - Model - INFO - Training accuracy: 0.942408
+2021-03-21 23:58:28,329 - Model - INFO - ---- EPOCH 055 EVALUATION ----
+2021-03-22 00:00:06,399 - Model - INFO - eval mean loss: 1.164336
+2021-03-22 00:00:06,399 - Model - INFO - eval point avg class IoU: 0.421381
+2021-03-22 00:00:06,400 - Model - INFO - eval point accuracy: 0.778996
+2021-03-22 00:00:06,400 - Model - INFO - eval point avg class acc: 0.529492
+2021-03-22 00:00:06,400 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.868
+class floor weight: 0.201, IoU: 0.961
+class wall weight: 0.168, IoU: 0.722
+class beam weight: 0.280, IoU: 0.001
+class column weight: 0.000, IoU: 0.087
+class window weight: 0.018, IoU: 0.559
+class door weight: 0.033, IoU: 0.161
+class table weight: 0.029, IoU: 0.541
+class chair weight: 0.039, IoU: 0.396
+class sofa weight: 0.019, IoU: 0.051
+class bookcase weight: 0.003, IoU: 0.458
+class board weight: 0.107, IoU: 0.356
+class clutter weight: 0.011, IoU: 0.317
+
+2021-03-22 00:00:06,401 - Model - INFO - Eval mean loss: 1.164336
+2021-03-22 00:00:06,402 - Model - INFO - Eval accuracy: 0.778996
+2021-03-22 00:00:06,402 - Model - INFO - Best mIoU: 0.427449
+2021-03-22 00:00:06,402 - Model - INFO - **** Epoch 56 (56/128) ****
+2021-03-22 00:00:06,402 - Model - INFO - Learning rate:0.000168
+2021-03-22 00:07:08,910 - Model - INFO - Training mean loss: 0.169841
+2021-03-22 00:07:08,910 - Model - INFO - Training accuracy: 0.942632
+2021-03-22 00:07:08,910 - Model - INFO - Save model...
+2021-03-22 00:07:08,910 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-22 00:07:09,384 - Model - INFO - Saving model....
+2021-03-22 00:07:09,823 - Model - INFO - ---- EPOCH 056 EVALUATION ----
+2021-03-22 00:08:46,877 - Model - INFO - eval mean loss: 1.278948
+2021-03-22 00:08:46,878 - Model - INFO - eval point avg class IoU: 0.407592
+2021-03-22 00:08:46,878 - Model - INFO - eval point accuracy: 0.763481
+2021-03-22 00:08:46,878 - Model - INFO - eval point avg class acc: 0.527807
+2021-03-22 00:08:46,878 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.861
+class floor weight: 0.199, IoU: 0.975
+class wall weight: 0.167, IoU: 0.692
+class beam weight: 0.281, IoU: 0.001
+class column weight: 0.000, IoU: 0.074
+class window weight: 0.019, IoU: 0.509
+class door weight: 0.034, IoU: 0.130
+class table weight: 0.031, IoU: 0.572
+class chair weight: 0.038, IoU: 0.387
+class sofa weight: 0.019, IoU: 0.070
+class bookcase weight: 0.003, IoU: 0.409
+class board weight: 0.107, IoU: 0.308
+class clutter weight: 0.012, IoU: 0.309
+
+2021-03-22 00:08:46,879 - Model - INFO - Eval mean loss: 1.278948
+2021-03-22 00:08:46,879 - Model - INFO - Eval accuracy: 0.763481
+2021-03-22 00:08:46,879 - Model - INFO - Best mIoU: 0.427449
+2021-03-22 00:08:46,879 - Model - INFO - **** Epoch 57 (57/128) ****
+2021-03-22 00:08:46,879 - Model - INFO - Learning rate:0.000168
+2021-03-22 00:15:55,836 - Model - INFO - Training mean loss: 0.169388
+2021-03-22 00:15:55,836 - Model - INFO - Training accuracy: 0.942695
+2021-03-22 00:15:56,279 - Model - INFO - ---- EPOCH 057 EVALUATION ----
+2021-03-22 00:17:35,250 - Model - INFO - eval mean loss: 1.177549
+2021-03-22 00:17:35,250 - Model - INFO - eval point avg class IoU: 0.420743
+2021-03-22 00:17:35,250 - Model - INFO - eval point accuracy: 0.778748
+2021-03-22 00:17:35,250 - Model - INFO - eval point avg class acc: 0.528279
+2021-03-22 00:17:35,251 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.866
+class floor weight: 0.200, IoU: 0.972
+class wall weight: 0.167, IoU: 0.705
+class beam weight: 0.283, IoU: 0.002
+class column weight: 0.000, IoU: 0.088
+class window weight: 0.016, IoU: 0.540
+class door weight: 0.032, IoU: 0.049
+class table weight: 0.031, IoU: 0.550
+class chair weight: 0.039, IoU: 0.415
+class sofa weight: 0.019, IoU: 0.077
+class bookcase weight: 0.003, IoU: 0.471
+class board weight: 0.106, IoU: 0.391
+class clutter weight: 0.012, IoU: 0.343
+
+2021-03-22 00:17:35,252 - Model - INFO - Eval mean loss: 1.177549
+2021-03-22 00:17:35,252 - Model - INFO - Eval accuracy: 0.778748
+2021-03-22 00:17:35,252 - Model - INFO - Best mIoU: 0.427449
+2021-03-22 00:17:35,252 - Model - INFO - **** Epoch 58 (58/128) ****
+2021-03-22 00:17:35,252 - Model - INFO - Learning rate:0.000168
+2021-03-22 00:24:43,123 - Model - INFO - Training mean loss: 0.168718
+2021-03-22 00:24:43,124 - Model - INFO - Training accuracy: 0.943006
+2021-03-22 00:24:43,607 - Model - INFO - ---- EPOCH 058 EVALUATION ----
+2021-03-22 00:26:23,497 - Model - INFO - eval mean loss: 1.254452
+2021-03-22 00:26:23,497 - Model - INFO - eval point avg class IoU: 0.414903
+2021-03-22 00:26:23,497 - Model - INFO - eval point accuracy: 0.776370
+2021-03-22 00:26:23,498 - Model - INFO - eval point avg class acc: 0.517535
+2021-03-22 00:26:23,498 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.864
+class floor weight: 0.201, IoU: 0.966
+class wall weight: 0.167, IoU: 0.703
+class beam weight: 0.277, IoU: 0.000
+class column weight: 0.000, IoU: 0.081
+class window weight: 0.018, IoU: 0.561
+class door weight: 0.035, IoU: 0.097
+class table weight: 0.030, IoU: 0.539
+class chair weight: 0.039, IoU: 0.389
+class sofa weight: 0.019, IoU: 0.055
+class bookcase weight: 0.003, IoU: 0.452
+class board weight: 0.107, IoU: 0.359
+class clutter weight: 0.012, IoU: 0.329
+
+2021-03-22 00:26:23,498 - Model - INFO - Eval mean loss: 1.254452
+2021-03-22 00:26:23,498 - Model - INFO - Eval accuracy: 0.776370
+2021-03-22 00:26:23,498 - Model - INFO - Best mIoU: 0.427449
+2021-03-22 00:26:23,498 - Model - INFO - **** Epoch 59 (59/128) ****
+2021-03-22 00:26:23,499 - Model - INFO - Learning rate:0.000168
+2021-03-22 00:33:32,178 - Model - INFO - Training mean loss: 0.168756
+2021-03-22 00:33:32,179 - Model - INFO - Training accuracy: 0.942867
+2021-03-22 00:33:32,571 - Model - INFO - ---- EPOCH 059 EVALUATION ----
+2021-03-22 00:35:10,738 - Model - INFO - eval mean loss: 1.174311
+2021-03-22 00:35:10,738 - Model - INFO - eval point avg class IoU: 0.420092
+2021-03-22 00:35:10,738 - Model - INFO - eval point accuracy: 0.775706
+2021-03-22 00:35:10,739 - Model - INFO - eval point avg class acc: 0.529260
+2021-03-22 00:35:10,739 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.088, IoU: 0.860
+class floor weight: 0.201, IoU: 0.975
+class wall weight: 0.168, IoU: 0.710
+class beam weight: 0.279, IoU: 0.001
+class column weight: 0.000, IoU: 0.086
+class window weight: 0.019, IoU: 0.518
+class door weight: 0.034, IoU: 0.132
+class table weight: 0.030, IoU: 0.567
+class chair weight: 0.038, IoU: 0.402
+class sofa weight: 0.020, IoU: 0.085
+class bookcase weight: 0.003, IoU: 0.470
+class board weight: 0.109, IoU: 0.321
+class clutter weight: 0.012, IoU: 0.335
+
+2021-03-22 00:35:10,739 - Model - INFO - Eval mean loss: 1.174311
+2021-03-22 00:35:10,739 - Model - INFO - Eval accuracy: 0.775706
+2021-03-22 00:35:10,739 - Model - INFO - Best mIoU: 0.427449
+2021-03-22 00:35:10,739 - Model - INFO - **** Epoch 60 (60/128) ****
+2021-03-22 00:35:10,739 - Model - INFO - Learning rate:0.000168
+2021-03-22 00:42:19,835 - Model - INFO - Training mean loss: 0.167437
+2021-03-22 00:42:19,835 - Model - INFO - Training accuracy: 0.943363
+2021-03-22 00:42:20,324 - Model - INFO - ---- EPOCH 060 EVALUATION ----
+2021-03-22 00:44:07,682 - Model - INFO - eval mean loss: 1.190635
+2021-03-22 00:44:07,682 - Model - INFO - eval point avg class IoU: 0.431278
+2021-03-22 00:44:07,682 - Model - INFO - eval point accuracy: 0.786634
+2021-03-22 00:44:07,682 - Model - INFO - eval point avg class acc: 0.526931
+2021-03-22 00:44:07,682 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.879
+class floor weight: 0.201, IoU: 0.978
+class wall weight: 0.167, IoU: 0.726
+class beam weight: 0.278, IoU: 0.001
+class column weight: 0.000, IoU: 0.057
+class window weight: 0.019, IoU: 0.559
+class door weight: 0.034, IoU: 0.175
+class table weight: 0.031, IoU: 0.556
+class chair weight: 0.039, IoU: 0.421
+class sofa weight: 0.019, IoU: 0.070
+class bookcase weight: 0.003, IoU: 0.441
+class board weight: 0.108, IoU: 0.417
+class clutter weight: 0.011, IoU: 0.327
+
+2021-03-22 00:44:07,683 - Model - INFO - Eval mean loss: 1.190635
+2021-03-22 00:44:07,683 - Model - INFO - Eval accuracy: 0.786634
+2021-03-22 00:44:07,683 - Model - INFO - Save model...
+2021-03-22 00:44:07,683 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/best_model.pth
+2021-03-22 00:44:07,851 - Model - INFO - Saving model....
+2021-03-22 00:44:07,851 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 00:44:07,852 - Model - INFO - **** Epoch 61 (61/128) ****
+2021-03-22 00:44:07,852 - Model - INFO - Learning rate:0.000118
+2021-03-22 00:51:17,428 - Model - INFO - Training mean loss: 0.160372
+2021-03-22 00:51:17,428 - Model - INFO - Training accuracy: 0.945843
+2021-03-22 00:51:17,428 - Model - INFO - Save model...
+2021-03-22 00:51:17,428 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-22 00:51:17,568 - Model - INFO - Saving model....
+2021-03-22 00:51:17,965 - Model - INFO - ---- EPOCH 061 EVALUATION ----
+2021-03-22 00:52:58,975 - Model - INFO - eval mean loss: 1.120125
+2021-03-22 00:52:58,976 - Model - INFO - eval point avg class IoU: 0.426392
+2021-03-22 00:52:58,976 - Model - INFO - eval point accuracy: 0.776839
+2021-03-22 00:52:58,976 - Model - INFO - eval point avg class acc: 0.534409
+2021-03-22 00:52:58,977 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.844
+class floor weight: 0.199, IoU: 0.975
+class wall weight: 0.166, IoU: 0.708
+class beam weight: 0.282, IoU: 0.001
+class column weight: 0.000, IoU: 0.103
+class window weight: 0.019, IoU: 0.548
+class door weight: 0.034, IoU: 0.212
+class table weight: 0.029, IoU: 0.558
+class chair weight: 0.040, IoU: 0.407
+class sofa weight: 0.019, IoU: 0.062
+class bookcase weight: 0.003, IoU: 0.478
+class board weight: 0.107, IoU: 0.304
+class clutter weight: 0.012, IoU: 0.344
+
+2021-03-22 00:52:58,978 - Model - INFO - Eval mean loss: 1.120125
+2021-03-22 00:52:58,978 - Model - INFO - Eval accuracy: 0.776839
+2021-03-22 00:52:58,978 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 00:52:58,978 - Model - INFO - **** Epoch 62 (62/128) ****
+2021-03-22 00:52:58,978 - Model - INFO - Learning rate:0.000118
+2021-03-22 01:00:05,480 - Model - INFO - Training mean loss: 0.158178
+2021-03-22 01:00:05,481 - Model - INFO - Training accuracy: 0.946524
+2021-03-22 01:00:05,878 - Model - INFO - ---- EPOCH 062 EVALUATION ----
+2021-03-22 01:01:51,043 - Model - INFO - eval mean loss: 1.142635
+2021-03-22 01:01:51,044 - Model - INFO - eval point avg class IoU: 0.421424
+2021-03-22 01:01:51,044 - Model - INFO - eval point accuracy: 0.779774
+2021-03-22 01:01:51,044 - Model - INFO - eval point avg class acc: 0.522277
+2021-03-22 01:01:51,044 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.877
+class floor weight: 0.200, IoU: 0.970
+class wall weight: 0.168, IoU: 0.707
+class beam weight: 0.279, IoU: 0.001
+class column weight: 0.000, IoU: 0.083
+class window weight: 0.018, IoU: 0.527
+class door weight: 0.033, IoU: 0.122
+class table weight: 0.029, IoU: 0.571
+class chair weight: 0.039, IoU: 0.384
+class sofa weight: 0.018, IoU: 0.077
+class bookcase weight: 0.003, IoU: 0.486
+class board weight: 0.109, IoU: 0.329
+class clutter weight: 0.012, IoU: 0.345
+
+2021-03-22 01:01:51,046 - Model - INFO - Eval mean loss: 1.142635
+2021-03-22 01:01:51,046 - Model - INFO - Eval accuracy: 0.779774
+2021-03-22 01:01:51,046 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 01:01:51,046 - Model - INFO - **** Epoch 63 (63/128) ****
+2021-03-22 01:01:51,046 - Model - INFO - Learning rate:0.000118
+2021-03-22 01:08:58,028 - Model - INFO - Training mean loss: 0.159380
+2021-03-22 01:08:58,028 - Model - INFO - Training accuracy: 0.945980
+2021-03-22 01:08:58,435 - Model - INFO - ---- EPOCH 063 EVALUATION ----
+2021-03-22 01:10:51,144 - Model - INFO - eval mean loss: 1.198003
+2021-03-22 01:10:51,144 - Model - INFO - eval point avg class IoU: 0.419163
+2021-03-22 01:10:51,144 - Model - INFO - eval point accuracy: 0.774307
+2021-03-22 01:10:51,145 - Model - INFO - eval point avg class acc: 0.527969
+2021-03-22 01:10:51,145 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.848
+class floor weight: 0.200, IoU: 0.967
+class wall weight: 0.167, IoU: 0.712
+class beam weight: 0.279, IoU: 0.002
+class column weight: 0.000, IoU: 0.069
+class window weight: 0.018, IoU: 0.539
+class door weight: 0.033, IoU: 0.136
+class table weight: 0.030, IoU: 0.540
+class chair weight: 0.039, IoU: 0.426
+class sofa weight: 0.019, IoU: 0.079
+class bookcase weight: 0.003, IoU: 0.473
+class board weight: 0.108, IoU: 0.319
+class clutter weight: 0.011, IoU: 0.341
+
+2021-03-22 01:10:51,145 - Model - INFO - Eval mean loss: 1.198003
+2021-03-22 01:10:51,145 - Model - INFO - Eval accuracy: 0.774307
+2021-03-22 01:10:51,145 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 01:10:51,145 - Model - INFO - **** Epoch 64 (64/128) ****
+2021-03-22 01:10:51,146 - Model - INFO - Learning rate:0.000118
+2021-03-22 01:18:27,901 - Model - INFO - Training mean loss: 0.157282
+2021-03-22 01:18:27,902 - Model - INFO - Training accuracy: 0.946850
+2021-03-22 01:18:28,453 - Model - INFO - ---- EPOCH 064 EVALUATION ----
+2021-03-22 01:20:36,228 - Model - INFO - eval mean loss: 1.186273
+2021-03-22 01:20:36,228 - Model - INFO - eval point avg class IoU: 0.428371
+2021-03-22 01:20:36,228 - Model - INFO - eval point accuracy: 0.785901
+2021-03-22 01:20:36,228 - Model - INFO - eval point avg class acc: 0.533932
+2021-03-22 01:20:36,228 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.868
+class floor weight: 0.202, IoU: 0.971
+class wall weight: 0.167, IoU: 0.709
+class beam weight: 0.278, IoU: 0.001
+class column weight: 0.000, IoU: 0.082
+class window weight: 0.017, IoU: 0.523
+class door weight: 0.033, IoU: 0.164
+class table weight: 0.030, IoU: 0.565
+class chair weight: 0.040, IoU: 0.405
+class sofa weight: 0.018, IoU: 0.099
+class bookcase weight: 0.003, IoU: 0.481
+class board weight: 0.109, IoU: 0.347
+class clutter weight: 0.012, IoU: 0.354
+
+2021-03-22 01:20:36,229 - Model - INFO - Eval mean loss: 1.186273
+2021-03-22 01:20:36,229 - Model - INFO - Eval accuracy: 0.785901
+2021-03-22 01:20:36,229 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 01:20:36,229 - Model - INFO - **** Epoch 65 (65/128) ****
+2021-03-22 01:20:36,229 - Model - INFO - Learning rate:0.000118
+2021-03-22 01:29:04,120 - Model - INFO - Training mean loss: 0.155888
+2021-03-22 01:29:04,120 - Model - INFO - Training accuracy: 0.947201
+2021-03-22 01:29:04,667 - Model - INFO - ---- EPOCH 065 EVALUATION ----
+2021-03-22 01:30:59,945 - Model - INFO - eval mean loss: 1.184932
+2021-03-22 01:30:59,945 - Model - INFO - eval point avg class IoU: 0.422774
+2021-03-22 01:30:59,945 - Model - INFO - eval point accuracy: 0.778570
+2021-03-22 01:30:59,945 - Model - INFO - eval point avg class acc: 0.526838
+2021-03-22 01:30:59,946 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.868
+class floor weight: 0.200, IoU: 0.973
+class wall weight: 0.168, IoU: 0.713
+class beam weight: 0.280, IoU: 0.001
+class column weight: 0.000, IoU: 0.077
+class window weight: 0.018, IoU: 0.547
+class door weight: 0.030, IoU: 0.144
+class table weight: 0.031, IoU: 0.562
+class chair weight: 0.039, IoU: 0.433
+class sofa weight: 0.019, IoU: 0.065
+class bookcase weight: 0.003, IoU: 0.479
+class board weight: 0.109, IoU: 0.309
+class clutter weight: 0.012, IoU: 0.324
+
+2021-03-22 01:30:59,946 - Model - INFO - Eval mean loss: 1.184932
+2021-03-22 01:30:59,946 - Model - INFO - Eval accuracy: 0.778570
+2021-03-22 01:30:59,946 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 01:30:59,946 - Model - INFO - **** Epoch 66 (66/128) ****
+2021-03-22 01:30:59,946 - Model - INFO - Learning rate:0.000118
+2021-03-22 01:39:07,499 - Model - INFO - Training mean loss: 0.157296
+2021-03-22 01:39:07,500 - Model - INFO - Training accuracy: 0.946724
+2021-03-22 01:39:07,500 - Model - INFO - Save model...
+2021-03-22 01:39:07,500 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-22 01:39:07,667 - Model - INFO - Saving model....
+2021-03-22 01:39:08,221 - Model - INFO - ---- EPOCH 066 EVALUATION ----
+2021-03-22 01:41:02,914 - Model - INFO - eval mean loss: 1.199343
+2021-03-22 01:41:02,914 - Model - INFO - eval point avg class IoU: 0.414925
+2021-03-22 01:41:02,914 - Model - INFO - eval point accuracy: 0.774976
+2021-03-22 01:41:02,914 - Model - INFO - eval point avg class acc: 0.517749
+2021-03-22 01:41:02,914 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.859
+class floor weight: 0.199, IoU: 0.975
+class wall weight: 0.167, IoU: 0.716
+class beam weight: 0.283, IoU: 0.001
+class column weight: 0.000, IoU: 0.104
+class window weight: 0.018, IoU: 0.538
+class door weight: 0.033, IoU: 0.074
+class table weight: 0.029, IoU: 0.550
+class chair weight: 0.038, IoU: 0.435
+class sofa weight: 0.019, IoU: 0.012
+class bookcase weight: 0.003, IoU: 0.456
+class board weight: 0.108, IoU: 0.345
+class clutter weight: 0.012, IoU: 0.330
+
+2021-03-22 01:41:02,915 - Model - INFO - Eval mean loss: 1.199343
+2021-03-22 01:41:02,915 - Model - INFO - Eval accuracy: 0.774976
+2021-03-22 01:41:02,915 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 01:41:02,915 - Model - INFO - **** Epoch 67 (67/128) ****
+2021-03-22 01:41:02,915 - Model - INFO - Learning rate:0.000118
+2021-03-22 01:49:09,875 - Model - INFO - Training mean loss: 0.157236
+2021-03-22 01:49:09,875 - Model - INFO - Training accuracy: 0.946839
+2021-03-22 01:49:10,376 - Model - INFO - ---- EPOCH 067 EVALUATION ----
+2021-03-22 01:51:03,434 - Model - INFO - eval mean loss: 1.177015
+2021-03-22 01:51:03,435 - Model - INFO - eval point avg class IoU: 0.421219
+2021-03-22 01:51:03,435 - Model - INFO - eval point accuracy: 0.772717
+2021-03-22 01:51:03,435 - Model - INFO - eval point avg class acc: 0.539157
+2021-03-22 01:51:03,435 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.851
+class floor weight: 0.201, IoU: 0.978
+class wall weight: 0.169, IoU: 0.705
+class beam weight: 0.278, IoU: 0.002
+class column weight: 0.000, IoU: 0.101
+class window weight: 0.017, IoU: 0.550
+class door weight: 0.034, IoU: 0.098
+class table weight: 0.032, IoU: 0.536
+class chair weight: 0.039, IoU: 0.438
+class sofa weight: 0.019, IoU: 0.094
+class bookcase weight: 0.002, IoU: 0.486
+class board weight: 0.108, IoU: 0.296
+class clutter weight: 0.010, IoU: 0.340
+
+2021-03-22 01:51:03,435 - Model - INFO - Eval mean loss: 1.177015
+2021-03-22 01:51:03,435 - Model - INFO - Eval accuracy: 0.772717
+2021-03-22 01:51:03,435 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 01:51:03,436 - Model - INFO - **** Epoch 68 (68/128) ****
+2021-03-22 01:51:03,436 - Model - INFO - Learning rate:0.000118
+2021-03-22 01:59:08,079 - Model - INFO - Training mean loss: 0.156431
+2021-03-22 01:59:08,080 - Model - INFO - Training accuracy: 0.946985
+2021-03-22 01:59:08,618 - Model - INFO - ---- EPOCH 068 EVALUATION ----
+2021-03-22 02:01:07,925 - Model - INFO - eval mean loss: 1.292198
+2021-03-22 02:01:07,926 - Model - INFO - eval point avg class IoU: 0.417736
+2021-03-22 02:01:07,926 - Model - INFO - eval point accuracy: 0.780429
+2021-03-22 02:01:07,926 - Model - INFO - eval point avg class acc: 0.515583
+2021-03-22 02:01:07,926 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.857
+class floor weight: 0.200, IoU: 0.974
+class wall weight: 0.167, IoU: 0.713
+class beam weight: 0.281, IoU: 0.000
+class column weight: 0.000, IoU: 0.076
+class window weight: 0.018, IoU: 0.528
+class door weight: 0.032, IoU: 0.122
+class table weight: 0.031, IoU: 0.550
+class chair weight: 0.039, IoU: 0.413
+class sofa weight: 0.019, IoU: 0.040
+class bookcase weight: 0.003, IoU: 0.448
+class board weight: 0.107, IoU: 0.375
+class clutter weight: 0.011, IoU: 0.335
+
+2021-03-22 02:01:07,928 - Model - INFO - Eval mean loss: 1.292198
+2021-03-22 02:01:07,928 - Model - INFO - Eval accuracy: 0.780429
+2021-03-22 02:01:07,928 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 02:01:07,928 - Model - INFO - **** Epoch 69 (69/128) ****
+2021-03-22 02:01:07,929 - Model - INFO - Learning rate:0.000118
+2021-03-22 02:09:25,534 - Model - INFO - Training mean loss: 0.156975
+2021-03-22 02:09:25,535 - Model - INFO - Training accuracy: 0.946865
+2021-03-22 02:09:26,101 - Model - INFO - ---- EPOCH 069 EVALUATION ----
+2021-03-22 02:11:25,094 - Model - INFO - eval mean loss: 1.261635
+2021-03-22 02:11:25,095 - Model - INFO - eval point avg class IoU: 0.410171
+2021-03-22 02:11:25,095 - Model - INFO - eval point accuracy: 0.771246
+2021-03-22 02:11:25,096 - Model - INFO - eval point avg class acc: 0.523924
+2021-03-22 02:11:25,096 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.861
+class floor weight: 0.199, IoU: 0.974
+class wall weight: 0.165, IoU: 0.704
+class beam weight: 0.282, IoU: 0.002
+class column weight: 0.000, IoU: 0.069
+class window weight: 0.018, IoU: 0.526
+class door weight: 0.033, IoU: 0.092
+class table weight: 0.030, IoU: 0.554
+class chair weight: 0.039, IoU: 0.401
+class sofa weight: 0.018, IoU: 0.048
+class bookcase weight: 0.003, IoU: 0.462
+class board weight: 0.108, IoU: 0.302
+class clutter weight: 0.012, IoU: 0.337
+
+2021-03-22 02:11:25,096 - Model - INFO - Eval mean loss: 1.261635
+2021-03-22 02:11:25,096 - Model - INFO - Eval accuracy: 0.771246
+2021-03-22 02:11:25,097 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 02:11:25,097 - Model - INFO - **** Epoch 70 (70/128) ****
+2021-03-22 02:11:25,097 - Model - INFO - Learning rate:0.000118
+2021-03-22 02:19:42,651 - Model - INFO - Training mean loss: 0.156177
+2021-03-22 02:19:42,651 - Model - INFO - Training accuracy: 0.946950
+2021-03-22 02:19:43,207 - Model - INFO - ---- EPOCH 070 EVALUATION ----
+2021-03-22 02:21:40,383 - Model - INFO - eval mean loss: 1.200016
+2021-03-22 02:21:40,384 - Model - INFO - eval point avg class IoU: 0.418532
+2021-03-22 02:21:40,384 - Model - INFO - eval point accuracy: 0.773502
+2021-03-22 02:21:40,384 - Model - INFO - eval point avg class acc: 0.524954
+2021-03-22 02:21:40,384 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.089, IoU: 0.854
+class floor weight: 0.200, IoU: 0.968
+class wall weight: 0.167, IoU: 0.712
+class beam weight: 0.279, IoU: 0.002
+class column weight: 0.000, IoU: 0.099
+class window weight: 0.019, IoU: 0.536
+class door weight: 0.037, IoU: 0.094
+class table weight: 0.030, IoU: 0.540
+class chair weight: 0.039, IoU: 0.416
+class sofa weight: 0.019, IoU: 0.064
+class bookcase weight: 0.002, IoU: 0.480
+class board weight: 0.108, IoU: 0.351
+class clutter weight: 0.011, IoU: 0.325
+
+2021-03-22 02:21:40,385 - Model - INFO - Eval mean loss: 1.200016
+2021-03-22 02:21:40,385 - Model - INFO - Eval accuracy: 0.773502
+2021-03-22 02:21:40,385 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 02:21:40,385 - Model - INFO - **** Epoch 71 (71/128) ****
+2021-03-22 02:21:40,385 - Model - INFO - Learning rate:0.000082
+2021-03-22 02:29:55,002 - Model - INFO - Training mean loss: 0.152042
+2021-03-22 02:29:55,003 - Model - INFO - Training accuracy: 0.948501
+2021-03-22 02:29:55,003 - Model - INFO - Save model...
+2021-03-22 02:29:55,003 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-22 02:29:55,333 - Model - INFO - Saving model....
+2021-03-22 02:29:55,877 - Model - INFO - ---- EPOCH 071 EVALUATION ----
+2021-03-22 02:31:51,251 - Model - INFO - eval mean loss: 1.209003
+2021-03-22 02:31:51,251 - Model - INFO - eval point avg class IoU: 0.424289
+2021-03-22 02:31:51,251 - Model - INFO - eval point accuracy: 0.781586
+2021-03-22 02:31:51,251 - Model - INFO - eval point avg class acc: 0.528847
+2021-03-22 02:31:51,252 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.877
+class floor weight: 0.202, IoU: 0.978
+class wall weight: 0.169, IoU: 0.713
+class beam weight: 0.276, IoU: 0.000
+class column weight: 0.000, IoU: 0.098
+class window weight: 0.018, IoU: 0.511
+class door weight: 0.034, IoU: 0.127
+class table weight: 0.030, IoU: 0.558
+class chair weight: 0.039, IoU: 0.432
+class sofa weight: 0.019, IoU: 0.093
+class bookcase weight: 0.002, IoU: 0.461
+class board weight: 0.107, IoU: 0.330
+class clutter weight: 0.012, IoU: 0.338
+
+2021-03-22 02:31:51,253 - Model - INFO - Eval mean loss: 1.209003
+2021-03-22 02:31:51,253 - Model - INFO - Eval accuracy: 0.781586
+2021-03-22 02:31:51,253 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 02:31:51,253 - Model - INFO - **** Epoch 72 (72/128) ****
+2021-03-22 02:31:51,254 - Model - INFO - Learning rate:0.000082
+2021-03-22 02:40:09,192 - Model - INFO - Training mean loss: 0.148817
+2021-03-22 02:40:09,193 - Model - INFO - Training accuracy: 0.949518
+2021-03-22 02:40:09,739 - Model - INFO - ---- EPOCH 072 EVALUATION ----
+2021-03-22 02:42:07,202 - Model - INFO - eval mean loss: 1.183129
+2021-03-22 02:42:07,202 - Model - INFO - eval point avg class IoU: 0.423313
+2021-03-22 02:42:07,203 - Model - INFO - eval point accuracy: 0.779340
+2021-03-22 02:42:07,203 - Model - INFO - eval point avg class acc: 0.530815
+2021-03-22 02:42:07,203 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.869
+class floor weight: 0.199, IoU: 0.974
+class wall weight: 0.166, IoU: 0.709
+class beam weight: 0.281, IoU: 0.001
+class column weight: 0.000, IoU: 0.081
+class window weight: 0.020, IoU: 0.506
+class door weight: 0.033, IoU: 0.167
+class table weight: 0.030, IoU: 0.553
+class chair weight: 0.038, IoU: 0.403
+class sofa weight: 0.019, IoU: 0.058
+class bookcase weight: 0.003, IoU: 0.500
+class board weight: 0.107, IoU: 0.345
+class clutter weight: 0.012, IoU: 0.338
+
+2021-03-22 02:42:07,203 - Model - INFO - Eval mean loss: 1.183129
+2021-03-22 02:42:07,203 - Model - INFO - Eval accuracy: 0.779340
+2021-03-22 02:42:07,203 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 02:42:07,203 - Model - INFO - **** Epoch 73 (73/128) ****
+2021-03-22 02:42:07,203 - Model - INFO - Learning rate:0.000082
+2021-03-22 02:50:23,958 - Model - INFO - Training mean loss: 0.149377
+2021-03-22 02:50:23,959 - Model - INFO - Training accuracy: 0.949320
+2021-03-22 02:50:24,533 - Model - INFO - ---- EPOCH 073 EVALUATION ----
+2021-03-22 02:52:21,081 - Model - INFO - eval mean loss: 1.173956
+2021-03-22 02:52:21,081 - Model - INFO - eval point avg class IoU: 0.429353
+2021-03-22 02:52:21,082 - Model - INFO - eval point accuracy: 0.782190
+2021-03-22 02:52:21,082 - Model - INFO - eval point avg class acc: 0.533352
+2021-03-22 02:52:21,082 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.867
+class floor weight: 0.201, IoU: 0.978
+class wall weight: 0.167, IoU: 0.720
+class beam weight: 0.278, IoU: 0.002
+class column weight: 0.000, IoU: 0.100
+class window weight: 0.017, IoU: 0.540
+class door weight: 0.033, IoU: 0.174
+class table weight: 0.031, IoU: 0.547
+class chair weight: 0.038, IoU: 0.430
+class sofa weight: 0.019, IoU: 0.064
+class bookcase weight: 0.003, IoU: 0.474
+class board weight: 0.109, IoU: 0.353
+class clutter weight: 0.012, IoU: 0.332
+
+2021-03-22 02:52:21,109 - Model - INFO - Eval mean loss: 1.173956
+2021-03-22 02:52:21,109 - Model - INFO - Eval accuracy: 0.782190
+2021-03-22 02:52:21,109 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 02:52:21,109 - Model - INFO - **** Epoch 74 (74/128) ****
+2021-03-22 02:52:21,109 - Model - INFO - Learning rate:0.000082
+2021-03-22 03:00:42,201 - Model - INFO - Training mean loss: 0.150463
+2021-03-22 03:00:42,202 - Model - INFO - Training accuracy: 0.949043
+2021-03-22 03:00:42,694 - Model - INFO - ---- EPOCH 074 EVALUATION ----
+2021-03-22 03:02:38,988 - Model - INFO - eval mean loss: 1.209698
+2021-03-22 03:02:38,988 - Model - INFO - eval point avg class IoU: 0.423435
+2021-03-22 03:02:38,988 - Model - INFO - eval point accuracy: 0.781191
+2021-03-22 03:02:38,988 - Model - INFO - eval point avg class acc: 0.525409
+2021-03-22 03:02:38,989 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.866
+class floor weight: 0.198, IoU: 0.976
+class wall weight: 0.166, IoU: 0.716
+class beam weight: 0.284, IoU: 0.001
+class column weight: 0.000, IoU: 0.107
+class window weight: 0.018, IoU: 0.499
+class door weight: 0.034, IoU: 0.164
+class table weight: 0.030, IoU: 0.552
+class chair weight: 0.038, IoU: 0.421
+class sofa weight: 0.019, IoU: 0.032
+class bookcase weight: 0.003, IoU: 0.466
+class board weight: 0.109, IoU: 0.361
+class clutter weight: 0.012, IoU: 0.344
+
+2021-03-22 03:02:38,990 - Model - INFO - Eval mean loss: 1.209698
+2021-03-22 03:02:38,990 - Model - INFO - Eval accuracy: 0.781191
+2021-03-22 03:02:38,990 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 03:02:38,990 - Model - INFO - **** Epoch 75 (75/128) ****
+2021-03-22 03:02:38,990 - Model - INFO - Learning rate:0.000082
+2021-03-22 03:10:50,977 - Model - INFO - Training mean loss: 0.149157
+2021-03-22 03:10:50,977 - Model - INFO - Training accuracy: 0.949459
+2021-03-22 03:10:51,465 - Model - INFO - ---- EPOCH 075 EVALUATION ----
+2021-03-22 03:12:47,539 - Model - INFO - eval mean loss: 1.204826
+2021-03-22 03:12:47,539 - Model - INFO - eval point avg class IoU: 0.423865
+2021-03-22 03:12:47,540 - Model - INFO - eval point accuracy: 0.775330
+2021-03-22 03:12:47,540 - Model - INFO - eval point avg class acc: 0.539191
+2021-03-22 03:12:47,540 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.867
+class floor weight: 0.201, IoU: 0.968
+class wall weight: 0.167, IoU: 0.700
+class beam weight: 0.277, IoU: 0.001
+class column weight: 0.000, IoU: 0.113
+class window weight: 0.019, IoU: 0.540
+class door weight: 0.033, IoU: 0.134
+class table weight: 0.030, IoU: 0.563
+class chair weight: 0.038, IoU: 0.411
+class sofa weight: 0.019, IoU: 0.065
+class bookcase weight: 0.003, IoU: 0.469
+class board weight: 0.110, IoU: 0.332
+class clutter weight: 0.011, IoU: 0.346
+
+2021-03-22 03:12:47,540 - Model - INFO - Eval mean loss: 1.204826
+2021-03-22 03:12:47,540 - Model - INFO - Eval accuracy: 0.775330
+2021-03-22 03:12:47,541 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 03:12:47,541 - Model - INFO - **** Epoch 76 (76/128) ****
+2021-03-22 03:12:47,541 - Model - INFO - Learning rate:0.000082
+2021-03-22 03:21:00,228 - Model - INFO - Training mean loss: 0.147597
+2021-03-22 03:21:00,228 - Model - INFO - Training accuracy: 0.949849
+2021-03-22 03:21:00,228 - Model - INFO - Save model...
+2021-03-22 03:21:00,228 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-22 03:21:00,398 - Model - INFO - Saving model....
+2021-03-22 03:21:00,908 - Model - INFO - ---- EPOCH 076 EVALUATION ----
+2021-03-22 03:22:56,648 - Model - INFO - eval mean loss: 1.229522
+2021-03-22 03:22:56,649 - Model - INFO - eval point avg class IoU: 0.416588
+2021-03-22 03:22:56,649 - Model - INFO - eval point accuracy: 0.781831
+2021-03-22 03:22:56,649 - Model - INFO - eval point avg class acc: 0.520212
+2021-03-22 03:22:56,649 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.875
+class floor weight: 0.201, IoU: 0.973
+class wall weight: 0.168, IoU: 0.709
+class beam weight: 0.279, IoU: 0.001
+class column weight: 0.000, IoU: 0.098
+class window weight: 0.017, IoU: 0.533
+class door weight: 0.034, IoU: 0.092
+class table weight: 0.030, IoU: 0.559
+class chair weight: 0.039, IoU: 0.388
+class sofa weight: 0.019, IoU: 0.054
+class bookcase weight: 0.003, IoU: 0.460
+class board weight: 0.109, IoU: 0.332
+class clutter weight: 0.011, IoU: 0.343
+
+2021-03-22 03:22:56,650 - Model - INFO - Eval mean loss: 1.229522
+2021-03-22 03:22:56,650 - Model - INFO - Eval accuracy: 0.781831
+2021-03-22 03:22:56,650 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 03:22:56,650 - Model - INFO - **** Epoch 77 (77/128) ****
+2021-03-22 03:22:56,650 - Model - INFO - Learning rate:0.000082
+2021-03-22 03:31:15,475 - Model - INFO - Training mean loss: 0.149044
+2021-03-22 03:31:15,476 - Model - INFO - Training accuracy: 0.949316
+2021-03-22 03:31:16,070 - Model - INFO - ---- EPOCH 077 EVALUATION ----
+2021-03-22 03:33:12,661 - Model - INFO - eval mean loss: 1.181662
+2021-03-22 03:33:12,661 - Model - INFO - eval point avg class IoU: 0.422972
+2021-03-22 03:33:12,661 - Model - INFO - eval point accuracy: 0.772862
+2021-03-22 03:33:12,662 - Model - INFO - eval point avg class acc: 0.535450
+2021-03-22 03:33:12,662 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.863
+class floor weight: 0.202, IoU: 0.975
+class wall weight: 0.169, IoU: 0.705
+class beam weight: 0.277, IoU: 0.002
+class column weight: 0.000, IoU: 0.115
+class window weight: 0.018, IoU: 0.530
+class door weight: 0.033, IoU: 0.156
+class table weight: 0.030, IoU: 0.539
+class chair weight: 0.039, IoU: 0.408
+class sofa weight: 0.019, IoU: 0.051
+class bookcase weight: 0.003, IoU: 0.477
+class board weight: 0.108, IoU: 0.345
+class clutter weight: 0.011, IoU: 0.333
+
+2021-03-22 03:33:12,662 - Model - INFO - Eval mean loss: 1.181662
+2021-03-22 03:33:12,662 - Model - INFO - Eval accuracy: 0.772862
+2021-03-22 03:33:12,662 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 03:33:12,662 - Model - INFO - **** Epoch 78 (78/128) ****
+2021-03-22 03:33:12,662 - Model - INFO - Learning rate:0.000082
+2021-03-22 03:41:30,480 - Model - INFO - Training mean loss: 0.148311
+2021-03-22 03:41:30,481 - Model - INFO - Training accuracy: 0.949638
+2021-03-22 03:41:31,025 - Model - INFO - ---- EPOCH 078 EVALUATION ----
+2021-03-22 03:43:26,024 - Model - INFO - eval mean loss: 1.192823
+2021-03-22 03:43:26,024 - Model - INFO - eval point avg class IoU: 0.430295
+2021-03-22 03:43:26,025 - Model - INFO - eval point accuracy: 0.782699
+2021-03-22 03:43:26,025 - Model - INFO - eval point avg class acc: 0.531702
+2021-03-22 03:43:26,025 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.866
+class floor weight: 0.201, IoU: 0.971
+class wall weight: 0.167, IoU: 0.720
+class beam weight: 0.278, IoU: 0.001
+class column weight: 0.000, IoU: 0.107
+class window weight: 0.016, IoU: 0.550
+class door weight: 0.032, IoU: 0.140
+class table weight: 0.032, IoU: 0.558
+class chair weight: 0.039, IoU: 0.412
+class sofa weight: 0.019, IoU: 0.055
+class bookcase weight: 0.003, IoU: 0.474
+class board weight: 0.108, IoU: 0.395
+class clutter weight: 0.013, IoU: 0.346
+
+2021-03-22 03:43:26,025 - Model - INFO - Eval mean loss: 1.192823
+2021-03-22 03:43:26,025 - Model - INFO - Eval accuracy: 0.782699
+2021-03-22 03:43:26,026 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 03:43:26,026 - Model - INFO - **** Epoch 79 (79/128) ****
+2021-03-22 03:43:26,026 - Model - INFO - Learning rate:0.000082
+2021-03-22 03:51:04,816 - Model - INFO - Training mean loss: 0.147066
+2021-03-22 03:51:04,817 - Model - INFO - Training accuracy: 0.950026
+2021-03-22 03:51:05,265 - Model - INFO - ---- EPOCH 079 EVALUATION ----
+2021-03-22 03:52:44,950 - Model - INFO - eval mean loss: 1.266621
+2021-03-22 03:52:44,950 - Model - INFO - eval point avg class IoU: 0.421733
+2021-03-22 03:52:44,950 - Model - INFO - eval point accuracy: 0.775220
+2021-03-22 03:52:44,951 - Model - INFO - eval point avg class acc: 0.524932
+2021-03-22 03:52:44,951 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.859
+class floor weight: 0.199, IoU: 0.967
+class wall weight: 0.165, IoU: 0.706
+class beam weight: 0.280, IoU: 0.002
+class column weight: 0.000, IoU: 0.092
+class window weight: 0.019, IoU: 0.542
+class door weight: 0.033, IoU: 0.129
+class table weight: 0.031, IoU: 0.561
+class chair weight: 0.039, IoU: 0.435
+class sofa weight: 0.018, IoU: 0.050
+class bookcase weight: 0.002, IoU: 0.464
+class board weight: 0.109, IoU: 0.342
+class clutter weight: 0.012, IoU: 0.334
+
+2021-03-22 03:52:44,951 - Model - INFO - Eval mean loss: 1.266621
+2021-03-22 03:52:44,951 - Model - INFO - Eval accuracy: 0.775220
+2021-03-22 03:52:44,951 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 03:52:44,951 - Model - INFO - **** Epoch 80 (80/128) ****
+2021-03-22 03:52:44,951 - Model - INFO - Learning rate:0.000082
+2021-03-22 03:59:54,537 - Model - INFO - Training mean loss: 0.147246
+2021-03-22 03:59:54,538 - Model - INFO - Training accuracy: 0.950042
+2021-03-22 03:59:55,005 - Model - INFO - ---- EPOCH 080 EVALUATION ----
+2021-03-22 04:01:33,594 - Model - INFO - eval mean loss: 1.253026
+2021-03-22 04:01:33,595 - Model - INFO - eval point avg class IoU: 0.422211
+2021-03-22 04:01:33,595 - Model - INFO - eval point accuracy: 0.773124
+2021-03-22 04:01:33,595 - Model - INFO - eval point avg class acc: 0.536139
+2021-03-22 04:01:33,595 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.841
+class floor weight: 0.200, IoU: 0.979
+class wall weight: 0.167, IoU: 0.702
+class beam weight: 0.280, IoU: 0.003
+class column weight: 0.000, IoU: 0.096
+class window weight: 0.018, IoU: 0.561
+class door weight: 0.033, IoU: 0.142
+class table weight: 0.032, IoU: 0.554
+class chair weight: 0.039, IoU: 0.441
+class sofa weight: 0.018, IoU: 0.052
+class bookcase weight: 0.003, IoU: 0.443
+class board weight: 0.107, IoU: 0.337
+class clutter weight: 0.012, IoU: 0.337
+
+2021-03-22 04:01:33,595 - Model - INFO - Eval mean loss: 1.253026
+2021-03-22 04:01:33,595 - Model - INFO - Eval accuracy: 0.773124
+2021-03-22 04:01:33,595 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 04:01:33,596 - Model - INFO - **** Epoch 81 (81/128) ****
+2021-03-22 04:01:33,596 - Model - INFO - Learning rate:0.000058
+2021-03-22 04:08:39,779 - Model - INFO - Training mean loss: 0.144255
+2021-03-22 04:08:39,779 - Model - INFO - Training accuracy: 0.951018
+2021-03-22 04:08:39,779 - Model - INFO - Save model...
+2021-03-22 04:08:39,780 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-22 04:08:40,331 - Model - INFO - Saving model....
+2021-03-22 04:08:40,754 - Model - INFO - ---- EPOCH 081 EVALUATION ----
+2021-03-22 04:10:18,621 - Model - INFO - eval mean loss: 1.157479
+2021-03-22 04:10:18,621 - Model - INFO - eval point avg class IoU: 0.429996
+2021-03-22 04:10:18,621 - Model - INFO - eval point accuracy: 0.786463
+2021-03-22 04:10:18,621 - Model - INFO - eval point avg class acc: 0.534410
+2021-03-22 04:10:18,622 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.872
+class floor weight: 0.201, IoU: 0.975
+class wall weight: 0.168, IoU: 0.723
+class beam weight: 0.278, IoU: 0.001
+class column weight: 0.000, IoU: 0.090
+class window weight: 0.017, IoU: 0.544
+class door weight: 0.035, IoU: 0.173
+class table weight: 0.029, IoU: 0.549
+class chair weight: 0.038, IoU: 0.445
+class sofa weight: 0.019, IoU: 0.050
+class bookcase weight: 0.003, IoU: 0.488
+class board weight: 0.109, IoU: 0.335
+class clutter weight: 0.011, IoU: 0.345
+
+2021-03-22 04:10:18,626 - Model - INFO - Eval mean loss: 1.157479
+2021-03-22 04:10:18,626 - Model - INFO - Eval accuracy: 0.786463
+2021-03-22 04:10:18,626 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 04:10:18,626 - Model - INFO - **** Epoch 82 (82/128) ****
+2021-03-22 04:10:18,626 - Model - INFO - Learning rate:0.000058
+2021-03-22 04:17:24,854 - Model - INFO - Training mean loss: 0.143203
+2021-03-22 04:17:24,855 - Model - INFO - Training accuracy: 0.951350
+2021-03-22 04:17:25,262 - Model - INFO - ---- EPOCH 082 EVALUATION ----
+2021-03-22 04:19:07,268 - Model - INFO - eval mean loss: 1.254947
+2021-03-22 04:19:07,268 - Model - INFO - eval point avg class IoU: 0.420800
+2021-03-22 04:19:07,268 - Model - INFO - eval point accuracy: 0.775134
+2021-03-22 04:19:07,268 - Model - INFO - eval point avg class acc: 0.531098
+2021-03-22 04:19:07,268 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.856
+class floor weight: 0.201, IoU: 0.972
+class wall weight: 0.168, IoU: 0.706
+class beam weight: 0.277, IoU: 0.002
+class column weight: 0.000, IoU: 0.095
+class window weight: 0.018, IoU: 0.545
+class door weight: 0.033, IoU: 0.141
+class table weight: 0.028, IoU: 0.547
+class chair weight: 0.039, IoU: 0.426
+class sofa weight: 0.019, IoU: 0.047
+class bookcase weight: 0.003, IoU: 0.457
+class board weight: 0.109, IoU: 0.337
+class clutter weight: 0.011, IoU: 0.340
+
+2021-03-22 04:19:07,270 - Model - INFO - Eval mean loss: 1.254947
+2021-03-22 04:19:07,270 - Model - INFO - Eval accuracy: 0.775134
+2021-03-22 04:19:07,270 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 04:19:07,270 - Model - INFO - **** Epoch 83 (83/128) ****
+2021-03-22 04:19:07,270 - Model - INFO - Learning rate:0.000058
+2021-03-22 04:26:19,300 - Model - INFO - Training mean loss: 0.142715
+2021-03-22 04:26:19,300 - Model - INFO - Training accuracy: 0.951488
+2021-03-22 04:26:19,725 - Model - INFO - ---- EPOCH 083 EVALUATION ----
+2021-03-22 04:27:58,899 - Model - INFO - eval mean loss: 1.188612
+2021-03-22 04:27:58,900 - Model - INFO - eval point avg class IoU: 0.426288
+2021-03-22 04:27:58,900 - Model - INFO - eval point accuracy: 0.782799
+2021-03-22 04:27:58,900 - Model - INFO - eval point avg class acc: 0.527993
+2021-03-22 04:27:58,900 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.870
+class floor weight: 0.202, IoU: 0.971
+class wall weight: 0.170, IoU: 0.711
+class beam weight: 0.276, IoU: 0.002
+class column weight: 0.000, IoU: 0.118
+class window weight: 0.016, IoU: 0.551
+class door weight: 0.034, IoU: 0.109
+class table weight: 0.028, IoU: 0.552
+class chair weight: 0.038, IoU: 0.438
+class sofa weight: 0.019, IoU: 0.066
+class bookcase weight: 0.003, IoU: 0.476
+class board weight: 0.109, IoU: 0.341
+class clutter weight: 0.012, IoU: 0.336
+
+2021-03-22 04:27:58,900 - Model - INFO - Eval mean loss: 1.188612
+2021-03-22 04:27:58,900 - Model - INFO - Eval accuracy: 0.782799
+2021-03-22 04:27:58,900 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 04:27:58,900 - Model - INFO - **** Epoch 84 (84/128) ****
+2021-03-22 04:27:58,900 - Model - INFO - Learning rate:0.000058
+2021-03-22 04:35:06,953 - Model - INFO - Training mean loss: 0.142204
+2021-03-22 04:35:06,954 - Model - INFO - Training accuracy: 0.951663
+2021-03-22 04:35:07,360 - Model - INFO - ---- EPOCH 084 EVALUATION ----
+2021-03-22 04:36:51,246 - Model - INFO - eval mean loss: 1.249742
+2021-03-22 04:36:51,246 - Model - INFO - eval point avg class IoU: 0.421049
+2021-03-22 04:36:51,246 - Model - INFO - eval point accuracy: 0.775514
+2021-03-22 04:36:51,246 - Model - INFO - eval point avg class acc: 0.529021
+2021-03-22 04:36:51,247 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.856
+class floor weight: 0.198, IoU: 0.970
+class wall weight: 0.167, IoU: 0.716
+class beam weight: 0.283, IoU: 0.001
+class column weight: 0.000, IoU: 0.072
+class window weight: 0.018, IoU: 0.553
+class door weight: 0.031, IoU: 0.142
+class table weight: 0.032, IoU: 0.553
+class chair weight: 0.038, IoU: 0.446
+class sofa weight: 0.019, IoU: 0.053
+class bookcase weight: 0.003, IoU: 0.474
+class board weight: 0.109, IoU: 0.305
+class clutter weight: 0.012, IoU: 0.335
+
+2021-03-22 04:36:51,248 - Model - INFO - Eval mean loss: 1.249742
+2021-03-22 04:36:51,248 - Model - INFO - Eval accuracy: 0.775514
+2021-03-22 04:36:51,248 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 04:36:51,248 - Model - INFO - **** Epoch 85 (85/128) ****
+2021-03-22 04:36:51,248 - Model - INFO - Learning rate:0.000058
+2021-03-22 04:43:57,587 - Model - INFO - Training mean loss: 0.143562
+2021-03-22 04:43:57,587 - Model - INFO - Training accuracy: 0.951208
+2021-03-22 04:43:57,976 - Model - INFO - ---- EPOCH 085 EVALUATION ----
+2021-03-22 04:45:39,415 - Model - INFO - eval mean loss: 1.193307
+2021-03-22 04:45:39,416 - Model - INFO - eval point avg class IoU: 0.422398
+2021-03-22 04:45:39,416 - Model - INFO - eval point accuracy: 0.780517
+2021-03-22 04:45:39,416 - Model - INFO - eval point avg class acc: 0.534946
+2021-03-22 04:45:39,416 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.862
+class floor weight: 0.201, IoU: 0.964
+class wall weight: 0.168, IoU: 0.713
+class beam weight: 0.280, IoU: 0.002
+class column weight: 0.000, IoU: 0.098
+class window weight: 0.017, IoU: 0.536
+class door weight: 0.031, IoU: 0.129
+class table weight: 0.030, IoU: 0.558
+class chair weight: 0.039, IoU: 0.438
+class sofa weight: 0.019, IoU: 0.055
+class bookcase weight: 0.003, IoU: 0.481
+class board weight: 0.108, IoU: 0.311
+class clutter weight: 0.012, IoU: 0.344
+
+2021-03-22 04:45:39,417 - Model - INFO - Eval mean loss: 1.193307
+2021-03-22 04:45:39,417 - Model - INFO - Eval accuracy: 0.780517
+2021-03-22 04:45:39,417 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 04:45:39,417 - Model - INFO - **** Epoch 86 (86/128) ****
+2021-03-22 04:45:39,417 - Model - INFO - Learning rate:0.000058
+2021-03-22 04:52:53,415 - Model - INFO - Training mean loss: 0.141411
+2021-03-22 04:52:53,415 - Model - INFO - Training accuracy: 0.951882
+2021-03-22 04:52:53,416 - Model - INFO - Save model...
+2021-03-22 04:52:53,416 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-22 04:52:53,566 - Model - INFO - Saving model....
+2021-03-22 04:52:53,982 - Model - INFO - ---- EPOCH 086 EVALUATION ----
+2021-03-22 04:54:37,998 - Model - INFO - eval mean loss: 1.197363
+2021-03-22 04:54:37,998 - Model - INFO - eval point avg class IoU: 0.424175
+2021-03-22 04:54:38,001 - Model - INFO - eval point accuracy: 0.777566
+2021-03-22 04:54:38,002 - Model - INFO - eval point avg class acc: 0.525149
+2021-03-22 04:54:38,002 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.858
+class floor weight: 0.201, IoU: 0.964
+class wall weight: 0.167, IoU: 0.718
+class beam weight: 0.281, IoU: 0.001
+class column weight: 0.000, IoU: 0.085
+class window weight: 0.018, IoU: 0.541
+class door weight: 0.033, IoU: 0.170
+class table weight: 0.029, IoU: 0.554
+class chair weight: 0.038, IoU: 0.413
+class sofa weight: 0.019, IoU: 0.033
+class bookcase weight: 0.003, IoU: 0.462
+class board weight: 0.109, IoU: 0.382
+class clutter weight: 0.012, IoU: 0.334
+
+2021-03-22 04:54:38,002 - Model - INFO - Eval mean loss: 1.197363
+2021-03-22 04:54:38,002 - Model - INFO - Eval accuracy: 0.777566
+2021-03-22 04:54:38,002 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 04:54:38,002 - Model - INFO - **** Epoch 87 (87/128) ****
+2021-03-22 04:54:38,002 - Model - INFO - Learning rate:0.000058
+2021-03-22 05:01:47,451 - Model - INFO - Training mean loss: 0.141960
+2021-03-22 05:01:47,452 - Model - INFO - Training accuracy: 0.951733
+2021-03-22 05:01:47,864 - Model - INFO - ---- EPOCH 087 EVALUATION ----
+2021-03-22 05:03:31,656 - Model - INFO - eval mean loss: 1.235096
+2021-03-22 05:03:31,657 - Model - INFO - eval point avg class IoU: 0.425789
+2021-03-22 05:03:31,657 - Model - INFO - eval point accuracy: 0.778088
+2021-03-22 05:03:31,657 - Model - INFO - eval point avg class acc: 0.539492
+2021-03-22 05:03:31,657 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.872
+class floor weight: 0.199, IoU: 0.969
+class wall weight: 0.166, IoU: 0.718
+class beam weight: 0.281, IoU: 0.002
+class column weight: 0.000, IoU: 0.098
+class window weight: 0.018, IoU: 0.548
+class door weight: 0.032, IoU: 0.166
+class table weight: 0.030, IoU: 0.543
+class chair weight: 0.037, IoU: 0.411
+class sofa weight: 0.019, IoU: 0.063
+class bookcase weight: 0.003, IoU: 0.452
+class board weight: 0.111, IoU: 0.353
+class clutter weight: 0.012, IoU: 0.339
+
+2021-03-22 05:03:31,658 - Model - INFO - Eval mean loss: 1.235096
+2021-03-22 05:03:31,658 - Model - INFO - Eval accuracy: 0.778088
+2021-03-22 05:03:31,658 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 05:03:31,658 - Model - INFO - **** Epoch 88 (88/128) ****
+2021-03-22 05:03:31,658 - Model - INFO - Learning rate:0.000058
+2021-03-22 05:10:40,439 - Model - INFO - Training mean loss: 0.141664
+2021-03-22 05:10:40,439 - Model - INFO - Training accuracy: 0.951840
+2021-03-22 05:10:40,856 - Model - INFO - ---- EPOCH 088 EVALUATION ----
+2021-03-22 05:12:42,025 - Model - INFO - eval mean loss: 1.228754
+2021-03-22 05:12:42,025 - Model - INFO - eval point avg class IoU: 0.425780
+2021-03-22 05:12:42,025 - Model - INFO - eval point accuracy: 0.784582
+2021-03-22 05:12:42,025 - Model - INFO - eval point avg class acc: 0.527421
+2021-03-22 05:12:42,025 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.089, IoU: 0.863
+class floor weight: 0.201, IoU: 0.968
+class wall weight: 0.169, IoU: 0.713
+class beam weight: 0.277, IoU: 0.002
+class column weight: 0.000, IoU: 0.099
+class window weight: 0.019, IoU: 0.557
+class door weight: 0.035, IoU: 0.125
+class table weight: 0.029, IoU: 0.550
+class chair weight: 0.038, IoU: 0.436
+class sofa weight: 0.019, IoU: 0.053
+class bookcase weight: 0.002, IoU: 0.481
+class board weight: 0.108, IoU: 0.351
+class clutter weight: 0.012, IoU: 0.338
+
+2021-03-22 05:12:42,026 - Model - INFO - Eval mean loss: 1.228754
+2021-03-22 05:12:42,026 - Model - INFO - Eval accuracy: 0.784582
+2021-03-22 05:12:42,026 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 05:12:42,026 - Model - INFO - **** Epoch 89 (89/128) ****
+2021-03-22 05:12:42,026 - Model - INFO - Learning rate:0.000058
+2021-03-22 05:19:54,263 - Model - INFO - Training mean loss: 0.140902
+2021-03-22 05:19:54,264 - Model - INFO - Training accuracy: 0.952095
+2021-03-22 05:19:54,730 - Model - INFO - ---- EPOCH 089 EVALUATION ----
+2021-03-22 05:21:40,032 - Model - INFO - eval mean loss: 1.178123
+2021-03-22 05:21:40,032 - Model - INFO - eval point avg class IoU: 0.427377
+2021-03-22 05:21:40,032 - Model - INFO - eval point accuracy: 0.782205
+2021-03-22 05:21:40,032 - Model - INFO - eval point avg class acc: 0.532812
+2021-03-22 05:21:40,032 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.856
+class floor weight: 0.201, IoU: 0.972
+class wall weight: 0.169, IoU: 0.712
+class beam weight: 0.279, IoU: 0.001
+class column weight: 0.000, IoU: 0.105
+class window weight: 0.017, IoU: 0.552
+class door weight: 0.032, IoU: 0.150
+class table weight: 0.031, IoU: 0.550
+class chair weight: 0.039, IoU: 0.400
+class sofa weight: 0.019, IoU: 0.071
+class bookcase weight: 0.003, IoU: 0.480
+class board weight: 0.108, IoU: 0.364
+class clutter weight: 0.012, IoU: 0.342
+
+2021-03-22 05:21:40,033 - Model - INFO - Eval mean loss: 1.178123
+2021-03-22 05:21:40,033 - Model - INFO - Eval accuracy: 0.782205
+2021-03-22 05:21:40,033 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 05:21:40,033 - Model - INFO - **** Epoch 90 (90/128) ****
+2021-03-22 05:21:40,033 - Model - INFO - Learning rate:0.000058
+2021-03-22 05:28:46,413 - Model - INFO - Training mean loss: 0.140641
+2021-03-22 05:28:46,414 - Model - INFO - Training accuracy: 0.952306
+2021-03-22 05:28:46,851 - Model - INFO - ---- EPOCH 090 EVALUATION ----
+2021-03-22 05:30:29,021 - Model - INFO - eval mean loss: 1.196854
+2021-03-22 05:30:29,021 - Model - INFO - eval point avg class IoU: 0.426678
+2021-03-22 05:30:29,021 - Model - INFO - eval point accuracy: 0.781986
+2021-03-22 05:30:29,021 - Model - INFO - eval point avg class acc: 0.532009
+2021-03-22 05:30:29,021 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.846
+class floor weight: 0.202, IoU: 0.976
+class wall weight: 0.169, IoU: 0.711
+class beam weight: 0.276, IoU: 0.002
+class column weight: 0.000, IoU: 0.082
+class window weight: 0.018, IoU: 0.536
+class door weight: 0.033, IoU: 0.144
+class table weight: 0.029, IoU: 0.559
+class chair weight: 0.038, IoU: 0.426
+class sofa weight: 0.019, IoU: 0.085
+class bookcase weight: 0.003, IoU: 0.490
+class board weight: 0.110, IoU: 0.352
+class clutter weight: 0.012, IoU: 0.339
+
+2021-03-22 05:30:29,022 - Model - INFO - Eval mean loss: 1.196854
+2021-03-22 05:30:29,022 - Model - INFO - Eval accuracy: 0.781986
+2021-03-22 05:30:29,022 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 05:30:29,022 - Model - INFO - **** Epoch 91 (91/128) ****
+2021-03-22 05:30:29,022 - Model - INFO - Learning rate:0.000040
+2021-03-22 05:37:35,612 - Model - INFO - Training mean loss: 0.138643
+2021-03-22 05:37:35,612 - Model - INFO - Training accuracy: 0.952840
+2021-03-22 05:37:35,612 - Model - INFO - Save model...
+2021-03-22 05:37:35,612 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-22 05:37:35,787 - Model - INFO - Saving model....
+2021-03-22 05:37:36,317 - Model - INFO - ---- EPOCH 091 EVALUATION ----
+2021-03-22 05:39:22,907 - Model - INFO - eval mean loss: 1.265873
+2021-03-22 05:39:22,908 - Model - INFO - eval point avg class IoU: 0.421214
+2021-03-22 05:39:22,908 - Model - INFO - eval point accuracy: 0.776727
+2021-03-22 05:39:22,908 - Model - INFO - eval point avg class acc: 0.523679
+2021-03-22 05:39:22,908 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.856
+class floor weight: 0.199, IoU: 0.975
+class wall weight: 0.168, IoU: 0.711
+class beam weight: 0.281, IoU: 0.001
+class column weight: 0.000, IoU: 0.120
+class window weight: 0.018, IoU: 0.521
+class door weight: 0.034, IoU: 0.132
+class table weight: 0.032, IoU: 0.549
+class chair weight: 0.038, IoU: 0.412
+class sofa weight: 0.019, IoU: 0.050
+class bookcase weight: 0.003, IoU: 0.459
+class board weight: 0.107, IoU: 0.353
+class clutter weight: 0.011, IoU: 0.337
+
+2021-03-22 05:39:22,908 - Model - INFO - Eval mean loss: 1.265873
+2021-03-22 05:39:22,908 - Model - INFO - Eval accuracy: 0.776727
+2021-03-22 05:39:22,908 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 05:39:22,909 - Model - INFO - **** Epoch 92 (92/128) ****
+2021-03-22 05:39:22,909 - Model - INFO - Learning rate:0.000040
+2021-03-22 05:46:31,263 - Model - INFO - Training mean loss: 0.139198
+2021-03-22 05:46:31,264 - Model - INFO - Training accuracy: 0.952662
+2021-03-22 05:46:31,741 - Model - INFO - ---- EPOCH 092 EVALUATION ----
+2021-03-22 05:48:13,888 - Model - INFO - eval mean loss: 1.274513
+2021-03-22 05:48:13,888 - Model - INFO - eval point avg class IoU: 0.425884
+2021-03-22 05:48:13,888 - Model - INFO - eval point accuracy: 0.780097
+2021-03-22 05:48:13,888 - Model - INFO - eval point avg class acc: 0.530541
+2021-03-22 05:48:13,888 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.853
+class floor weight: 0.202, IoU: 0.972
+class wall weight: 0.167, IoU: 0.711
+class beam weight: 0.277, IoU: 0.002
+class column weight: 0.000, IoU: 0.082
+class window weight: 0.020, IoU: 0.532
+class door weight: 0.035, IoU: 0.119
+class table weight: 0.029, IoU: 0.560
+class chair weight: 0.039, IoU: 0.429
+class sofa weight: 0.019, IoU: 0.073
+class bookcase weight: 0.003, IoU: 0.472
+class board weight: 0.108, IoU: 0.391
+class clutter weight: 0.011, IoU: 0.341
+
+2021-03-22 05:48:13,888 - Model - INFO - Eval mean loss: 1.274513
+2021-03-22 05:48:13,888 - Model - INFO - Eval accuracy: 0.780097
+2021-03-22 05:48:13,888 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 05:48:13,888 - Model - INFO - **** Epoch 93 (93/128) ****
+2021-03-22 05:48:13,889 - Model - INFO - Learning rate:0.000040
+2021-03-22 05:55:22,428 - Model - INFO - Training mean loss: 0.137305
+2021-03-22 05:55:22,429 - Model - INFO - Training accuracy: 0.953191
+2021-03-22 05:55:22,818 - Model - INFO - ---- EPOCH 093 EVALUATION ----
+2021-03-22 05:57:05,761 - Model - INFO - eval mean loss: 1.222013
+2021-03-22 05:57:05,761 - Model - INFO - eval point avg class IoU: 0.422806
+2021-03-22 05:57:05,761 - Model - INFO - eval point accuracy: 0.781930
+2021-03-22 05:57:05,762 - Model - INFO - eval point avg class acc: 0.523921
+2021-03-22 05:57:05,762 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.874
+class floor weight: 0.199, IoU: 0.973
+class wall weight: 0.167, IoU: 0.722
+class beam weight: 0.281, IoU: 0.001
+class column weight: 0.000, IoU: 0.106
+class window weight: 0.018, IoU: 0.564
+class door weight: 0.032, IoU: 0.098
+class table weight: 0.029, IoU: 0.559
+class chair weight: 0.039, IoU: 0.426
+class sofa weight: 0.019, IoU: 0.033
+class bookcase weight: 0.003, IoU: 0.458
+class board weight: 0.110, IoU: 0.349
+class clutter weight: 0.012, IoU: 0.335
+
+2021-03-22 05:57:05,762 - Model - INFO - Eval mean loss: 1.222013
+2021-03-22 05:57:05,763 - Model - INFO - Eval accuracy: 0.781930
+2021-03-22 05:57:05,763 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 05:57:05,763 - Model - INFO - **** Epoch 94 (94/128) ****
+2021-03-22 05:57:05,763 - Model - INFO - Learning rate:0.000040
+2021-03-22 06:04:12,569 - Model - INFO - Training mean loss: 0.138167
+2021-03-22 06:04:12,569 - Model - INFO - Training accuracy: 0.952953
+2021-03-22 06:04:13,025 - Model - INFO - ---- EPOCH 094 EVALUATION ----
+2021-03-22 06:05:54,152 - Model - INFO - eval mean loss: 1.206476
+2021-03-22 06:05:54,152 - Model - INFO - eval point avg class IoU: 0.426979
+2021-03-22 06:05:54,152 - Model - INFO - eval point accuracy: 0.777812
+2021-03-22 06:05:54,152 - Model - INFO - eval point avg class acc: 0.538309
+2021-03-22 06:05:54,153 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.866
+class floor weight: 0.202, IoU: 0.965
+class wall weight: 0.167, IoU: 0.714
+class beam weight: 0.277, IoU: 0.001
+class column weight: 0.000, IoU: 0.110
+class window weight: 0.019, IoU: 0.564
+class door weight: 0.033, IoU: 0.132
+class table weight: 0.030, IoU: 0.558
+class chair weight: 0.039, IoU: 0.423
+class sofa weight: 0.018, IoU: 0.097
+class bookcase weight: 0.003, IoU: 0.463
+class board weight: 0.108, IoU: 0.324
+class clutter weight: 0.012, IoU: 0.334
+
+2021-03-22 06:05:54,153 - Model - INFO - Eval mean loss: 1.206476
+2021-03-22 06:05:54,153 - Model - INFO - Eval accuracy: 0.777812
+2021-03-22 06:05:54,153 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 06:05:54,153 - Model - INFO - **** Epoch 95 (95/128) ****
+2021-03-22 06:05:54,153 - Model - INFO - Learning rate:0.000040
+2021-03-22 06:13:02,026 - Model - INFO - Training mean loss: 0.136827
+2021-03-22 06:13:02,027 - Model - INFO - Training accuracy: 0.953354
+2021-03-22 06:13:02,436 - Model - INFO - ---- EPOCH 095 EVALUATION ----
+2021-03-22 06:14:40,690 - Model - INFO - eval mean loss: 1.216490
+2021-03-22 06:14:40,690 - Model - INFO - eval point avg class IoU: 0.423723
+2021-03-22 06:14:40,690 - Model - INFO - eval point accuracy: 0.779963
+2021-03-22 06:14:40,690 - Model - INFO - eval point avg class acc: 0.528977
+2021-03-22 06:14:40,690 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.864
+class floor weight: 0.201, IoU: 0.972
+class wall weight: 0.168, IoU: 0.718
+class beam weight: 0.281, IoU: 0.001
+class column weight: 0.000, IoU: 0.081
+class window weight: 0.017, IoU: 0.557
+class door weight: 0.032, IoU: 0.159
+class table weight: 0.030, IoU: 0.543
+class chair weight: 0.038, IoU: 0.437
+class sofa weight: 0.020, IoU: 0.043
+class bookcase weight: 0.003, IoU: 0.468
+class board weight: 0.106, IoU: 0.328
+class clutter weight: 0.012, IoU: 0.337
+
+2021-03-22 06:14:40,692 - Model - INFO - Eval mean loss: 1.216490
+2021-03-22 06:14:40,692 - Model - INFO - Eval accuracy: 0.779963
+2021-03-22 06:14:40,692 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 06:14:40,692 - Model - INFO - **** Epoch 96 (96/128) ****
+2021-03-22 06:14:40,692 - Model - INFO - Learning rate:0.000040
+2021-03-22 06:21:51,157 - Model - INFO - Training mean loss: 0.137935
+2021-03-22 06:21:51,157 - Model - INFO - Training accuracy: 0.953086
+2021-03-22 06:21:51,158 - Model - INFO - Save model...
+2021-03-22 06:21:51,158 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-22 06:21:51,365 - Model - INFO - Saving model....
+2021-03-22 06:21:51,865 - Model - INFO - ---- EPOCH 096 EVALUATION ----
+2021-03-22 06:23:39,215 - Model - INFO - eval mean loss: 1.217797
+2021-03-22 06:23:39,215 - Model - INFO - eval point avg class IoU: 0.419506
+2021-03-22 06:23:39,216 - Model - INFO - eval point accuracy: 0.774299
+2021-03-22 06:23:39,216 - Model - INFO - eval point avg class acc: 0.521912
+2021-03-22 06:23:39,216 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.851
+class floor weight: 0.200, IoU: 0.942
+class wall weight: 0.167, IoU: 0.710
+class beam weight: 0.281, IoU: 0.000
+class column weight: 0.000, IoU: 0.090
+class window weight: 0.017, IoU: 0.536
+class door weight: 0.034, IoU: 0.157
+class table weight: 0.030, IoU: 0.542
+class chair weight: 0.038, IoU: 0.402
+class sofa weight: 0.018, IoU: 0.063
+class bookcase weight: 0.003, IoU: 0.479
+class board weight: 0.108, IoU: 0.345
+class clutter weight: 0.012, IoU: 0.336
+
+2021-03-22 06:23:39,216 - Model - INFO - Eval mean loss: 1.217797
+2021-03-22 06:23:39,216 - Model - INFO - Eval accuracy: 0.774299
+2021-03-22 06:23:39,217 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 06:23:39,217 - Model - INFO - **** Epoch 97 (97/128) ****
+2021-03-22 06:23:39,217 - Model - INFO - Learning rate:0.000040
+2021-03-22 06:30:43,807 - Model - INFO - Training mean loss: 0.137776
+2021-03-22 06:30:43,807 - Model - INFO - Training accuracy: 0.953014
+2021-03-22 06:30:44,177 - Model - INFO - ---- EPOCH 097 EVALUATION ----
+2021-03-22 06:32:26,074 - Model - INFO - eval mean loss: 1.268143
+2021-03-22 06:32:26,074 - Model - INFO - eval point avg class IoU: 0.422107
+2021-03-22 06:32:26,074 - Model - INFO - eval point accuracy: 0.773666
+2021-03-22 06:32:26,075 - Model - INFO - eval point avg class acc: 0.529144
+2021-03-22 06:32:26,075 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.089, IoU: 0.856
+class floor weight: 0.201, IoU: 0.967
+class wall weight: 0.168, IoU: 0.705
+class beam weight: 0.277, IoU: 0.001
+class column weight: 0.000, IoU: 0.100
+class window weight: 0.018, IoU: 0.527
+class door weight: 0.035, IoU: 0.159
+class table weight: 0.032, IoU: 0.547
+class chair weight: 0.038, IoU: 0.438
+class sofa weight: 0.019, IoU: 0.050
+class bookcase weight: 0.003, IoU: 0.441
+class board weight: 0.108, IoU: 0.369
+class clutter weight: 0.011, IoU: 0.326
+
+2021-03-22 06:32:26,076 - Model - INFO - Eval mean loss: 1.268143
+2021-03-22 06:32:26,077 - Model - INFO - Eval accuracy: 0.773666
+2021-03-22 06:32:26,077 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 06:32:26,077 - Model - INFO - **** Epoch 98 (98/128) ****
+2021-03-22 06:32:26,077 - Model - INFO - Learning rate:0.000040
+2021-03-22 06:39:33,312 - Model - INFO - Training mean loss: 0.137125
+2021-03-22 06:39:33,312 - Model - INFO - Training accuracy: 0.953321
+2021-03-22 06:39:33,749 - Model - INFO - ---- EPOCH 098 EVALUATION ----
+2021-03-22 06:41:12,803 - Model - INFO - eval mean loss: 1.245519
+2021-03-22 06:41:12,803 - Model - INFO - eval point avg class IoU: 0.424340
+2021-03-22 06:41:12,804 - Model - INFO - eval point accuracy: 0.776479
+2021-03-22 06:41:12,804 - Model - INFO - eval point avg class acc: 0.526702
+2021-03-22 06:41:12,804 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.850
+class floor weight: 0.199, IoU: 0.964
+class wall weight: 0.168, IoU: 0.705
+class beam weight: 0.278, IoU: 0.001
+class column weight: 0.000, IoU: 0.104
+class window weight: 0.020, IoU: 0.572
+class door weight: 0.034, IoU: 0.136
+class table weight: 0.030, IoU: 0.555
+class chair weight: 0.039, IoU: 0.440
+class sofa weight: 0.019, IoU: 0.035
+class bookcase weight: 0.003, IoU: 0.481
+class board weight: 0.107, IoU: 0.339
+class clutter weight: 0.012, IoU: 0.335
+
+2021-03-22 06:41:12,804 - Model - INFO - Eval mean loss: 1.245519
+2021-03-22 06:41:12,804 - Model - INFO - Eval accuracy: 0.776479
+2021-03-22 06:41:12,804 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 06:41:12,804 - Model - INFO - **** Epoch 99 (99/128) ****
+2021-03-22 06:41:12,804 - Model - INFO - Learning rate:0.000040
+2021-03-22 06:48:20,150 - Model - INFO - Training mean loss: 0.136738
+2021-03-22 06:48:20,150 - Model - INFO - Training accuracy: 0.953467
+2021-03-22 06:48:20,546 - Model - INFO - ---- EPOCH 099 EVALUATION ----
+2021-03-22 06:50:04,347 - Model - INFO - eval mean loss: 1.201535
+2021-03-22 06:50:04,348 - Model - INFO - eval point avg class IoU: 0.427304
+2021-03-22 06:50:04,348 - Model - INFO - eval point accuracy: 0.783466
+2021-03-22 06:50:04,348 - Model - INFO - eval point avg class acc: 0.526532
+2021-03-22 06:50:04,349 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.863
+class floor weight: 0.199, IoU: 0.972
+class wall weight: 0.167, IoU: 0.719
+class beam weight: 0.282, IoU: 0.001
+class column weight: 0.000, IoU: 0.099
+class window weight: 0.018, IoU: 0.567
+class door weight: 0.032, IoU: 0.153
+class table weight: 0.029, IoU: 0.546
+class chair weight: 0.039, IoU: 0.431
+class sofa weight: 0.020, IoU: 0.041
+class bookcase weight: 0.003, IoU: 0.477
+class board weight: 0.108, IoU: 0.346
+class clutter weight: 0.012, IoU: 0.340
+
+2021-03-22 06:50:04,349 - Model - INFO - Eval mean loss: 1.201535
+2021-03-22 06:50:04,349 - Model - INFO - Eval accuracy: 0.783466
+2021-03-22 06:50:04,349 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 06:50:04,350 - Model - INFO - **** Epoch 100 (100/128) ****
+2021-03-22 06:50:04,350 - Model - INFO - Learning rate:0.000040
+2021-03-22 06:57:17,117 - Model - INFO - Training mean loss: 0.136930
+2021-03-22 06:57:17,117 - Model - INFO - Training accuracy: 0.953375
+2021-03-22 06:57:17,542 - Model - INFO - ---- EPOCH 100 EVALUATION ----
+2021-03-22 06:59:00,065 - Model - INFO - eval mean loss: 1.224387
+2021-03-22 06:59:00,066 - Model - INFO - eval point avg class IoU: 0.425855
+2021-03-22 06:59:00,066 - Model - INFO - eval point accuracy: 0.775035
+2021-03-22 06:59:00,066 - Model - INFO - eval point avg class acc: 0.531254
+2021-03-22 06:59:00,066 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.857
+class floor weight: 0.201, IoU: 0.970
+class wall weight: 0.167, IoU: 0.712
+class beam weight: 0.279, IoU: 0.001
+class column weight: 0.000, IoU: 0.107
+class window weight: 0.019, IoU: 0.556
+class door weight: 0.033, IoU: 0.139
+class table weight: 0.030, IoU: 0.547
+class chair weight: 0.038, IoU: 0.401
+class sofa weight: 0.018, IoU: 0.101
+class bookcase weight: 0.003, IoU: 0.476
+class board weight: 0.111, IoU: 0.337
+class clutter weight: 0.012, IoU: 0.333
+
+2021-03-22 06:59:00,066 - Model - INFO - Eval mean loss: 1.224387
+2021-03-22 06:59:00,067 - Model - INFO - Eval accuracy: 0.775035
+2021-03-22 06:59:00,067 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 06:59:00,067 - Model - INFO - **** Epoch 101 (101/128) ****
+2021-03-22 06:59:00,067 - Model - INFO - Learning rate:0.000028
+2021-03-22 07:06:11,518 - Model - INFO - Training mean loss: 0.135774
+2021-03-22 07:06:11,518 - Model - INFO - Training accuracy: 0.953751
+2021-03-22 07:06:11,518 - Model - INFO - Save model...
+2021-03-22 07:06:11,518 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-22 07:06:12,119 - Model - INFO - Saving model....
+2021-03-22 07:06:12,532 - Model - INFO - ---- EPOCH 101 EVALUATION ----
+2021-03-22 07:07:54,825 - Model - INFO - eval mean loss: 1.210512
+2021-03-22 07:07:54,826 - Model - INFO - eval point avg class IoU: 0.423440
+2021-03-22 07:07:54,826 - Model - INFO - eval point accuracy: 0.777933
+2021-03-22 07:07:54,826 - Model - INFO - eval point avg class acc: 0.527373
+2021-03-22 07:07:54,826 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.857
+class floor weight: 0.201, IoU: 0.969
+class wall weight: 0.169, IoU: 0.709
+class beam weight: 0.276, IoU: 0.001
+class column weight: 0.000, IoU: 0.106
+class window weight: 0.018, IoU: 0.554
+class door weight: 0.033, IoU: 0.119
+class table weight: 0.030, IoU: 0.553
+class chair weight: 0.039, IoU: 0.413
+class sofa weight: 0.019, IoU: 0.076
+class bookcase weight: 0.003, IoU: 0.480
+class board weight: 0.110, IoU: 0.333
+class clutter weight: 0.012, IoU: 0.334
+
+2021-03-22 07:07:54,828 - Model - INFO - Eval mean loss: 1.210512
+2021-03-22 07:07:54,828 - Model - INFO - Eval accuracy: 0.777933
+2021-03-22 07:07:54,828 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 07:07:54,828 - Model - INFO - **** Epoch 102 (102/128) ****
+2021-03-22 07:07:54,828 - Model - INFO - Learning rate:0.000028
+2021-03-22 07:15:01,706 - Model - INFO - Training mean loss: 0.135156
+2021-03-22 07:15:01,707 - Model - INFO - Training accuracy: 0.953925
+2021-03-22 07:15:02,188 - Model - INFO - ---- EPOCH 102 EVALUATION ----
+2021-03-22 07:16:43,589 - Model - INFO - eval mean loss: 1.204642
+2021-03-22 07:16:43,590 - Model - INFO - eval point avg class IoU: 0.424573
+2021-03-22 07:16:43,590 - Model - INFO - eval point accuracy: 0.780204
+2021-03-22 07:16:43,590 - Model - INFO - eval point avg class acc: 0.531976
+2021-03-22 07:16:43,590 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.093, IoU: 0.864
+class floor weight: 0.200, IoU: 0.977
+class wall weight: 0.168, IoU: 0.708
+class beam weight: 0.279, IoU: 0.002
+class column weight: 0.000, IoU: 0.093
+class window weight: 0.018, IoU: 0.543
+class door weight: 0.033, IoU: 0.156
+class table weight: 0.030, IoU: 0.549
+class chair weight: 0.039, IoU: 0.441
+class sofa weight: 0.018, IoU: 0.046
+class bookcase weight: 0.003, IoU: 0.474
+class board weight: 0.110, IoU: 0.319
+class clutter weight: 0.010, IoU: 0.349
+
+2021-03-22 07:16:43,590 - Model - INFO - Eval mean loss: 1.204642
+2021-03-22 07:16:43,591 - Model - INFO - Eval accuracy: 0.780204
+2021-03-22 07:16:43,591 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 07:16:43,591 - Model - INFO - **** Epoch 103 (103/128) ****
+2021-03-22 07:16:43,591 - Model - INFO - Learning rate:0.000028
+2021-03-22 07:23:56,047 - Model - INFO - Training mean loss: 0.135017
+2021-03-22 07:23:56,048 - Model - INFO - Training accuracy: 0.954082
+2021-03-22 07:23:56,581 - Model - INFO - ---- EPOCH 103 EVALUATION ----
+2021-03-22 07:25:39,105 - Model - INFO - eval mean loss: 1.187662
+2021-03-22 07:25:39,105 - Model - INFO - eval point avg class IoU: 0.426417
+2021-03-22 07:25:39,105 - Model - INFO - eval point accuracy: 0.781050
+2021-03-22 07:25:39,106 - Model - INFO - eval point avg class acc: 0.528036
+2021-03-22 07:25:39,106 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.870
+class floor weight: 0.199, IoU: 0.976
+class wall weight: 0.167, IoU: 0.711
+class beam weight: 0.280, IoU: 0.001
+class column weight: 0.000, IoU: 0.098
+class window weight: 0.019, IoU: 0.575
+class door weight: 0.034, IoU: 0.143
+class table weight: 0.030, IoU: 0.555
+class chair weight: 0.039, IoU: 0.404
+class sofa weight: 0.018, IoU: 0.042
+class bookcase weight: 0.003, IoU: 0.480
+class board weight: 0.108, IoU: 0.344
+class clutter weight: 0.012, IoU: 0.345
+
+2021-03-22 07:25:39,106 - Model - INFO - Eval mean loss: 1.187662
+2021-03-22 07:25:39,107 - Model - INFO - Eval accuracy: 0.781050
+2021-03-22 07:25:39,107 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 07:25:39,107 - Model - INFO - **** Epoch 104 (104/128) ****
+2021-03-22 07:25:39,107 - Model - INFO - Learning rate:0.000028
+2021-03-22 07:32:52,301 - Model - INFO - Training mean loss: 0.134132
+2021-03-22 07:32:52,301 - Model - INFO - Training accuracy: 0.954325
+2021-03-22 07:32:52,837 - Model - INFO - ---- EPOCH 104 EVALUATION ----
+2021-03-22 07:34:35,132 - Model - INFO - eval mean loss: 1.202142
+2021-03-22 07:34:35,133 - Model - INFO - eval point avg class IoU: 0.426129
+2021-03-22 07:34:35,133 - Model - INFO - eval point accuracy: 0.779448
+2021-03-22 07:34:35,133 - Model - INFO - eval point avg class acc: 0.528517
+2021-03-22 07:34:35,134 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.869
+class floor weight: 0.201, IoU: 0.974
+class wall weight: 0.167, IoU: 0.707
+class beam weight: 0.278, IoU: 0.000
+class column weight: 0.000, IoU: 0.102
+class window weight: 0.017, IoU: 0.522
+class door weight: 0.034, IoU: 0.147
+class table weight: 0.030, IoU: 0.566
+class chair weight: 0.038, IoU: 0.428
+class sofa weight: 0.019, IoU: 0.057
+class bookcase weight: 0.003, IoU: 0.489
+class board weight: 0.110, IoU: 0.339
+class clutter weight: 0.012, IoU: 0.340
+
+2021-03-22 07:34:35,135 - Model - INFO - Eval mean loss: 1.202142
+2021-03-22 07:34:35,135 - Model - INFO - Eval accuracy: 0.779448
+2021-03-22 07:34:35,135 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 07:34:35,135 - Model - INFO - **** Epoch 105 (105/128) ****
+2021-03-22 07:34:35,135 - Model - INFO - Learning rate:0.000028
+2021-03-22 07:41:52,301 - Model - INFO - Training mean loss: 0.134428
+2021-03-22 07:41:52,301 - Model - INFO - Training accuracy: 0.954244
+2021-03-22 07:41:52,731 - Model - INFO - ---- EPOCH 105 EVALUATION ----
+2021-03-22 07:43:35,309 - Model - INFO - eval mean loss: 1.217973
+2021-03-22 07:43:35,309 - Model - INFO - eval point avg class IoU: 0.424259
+2021-03-22 07:43:35,309 - Model - INFO - eval point accuracy: 0.776619
+2021-03-22 07:43:35,310 - Model - INFO - eval point avg class acc: 0.530720
+2021-03-22 07:43:35,310 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.860
+class floor weight: 0.199, IoU: 0.974
+class wall weight: 0.167, IoU: 0.704
+class beam weight: 0.278, IoU: 0.001
+class column weight: 0.000, IoU: 0.106
+class window weight: 0.019, IoU: 0.541
+class door weight: 0.035, IoU: 0.189
+class table weight: 0.031, IoU: 0.545
+class chair weight: 0.038, IoU: 0.425
+class sofa weight: 0.019, IoU: 0.032
+class bookcase weight: 0.002, IoU: 0.477
+class board weight: 0.108, IoU: 0.330
+class clutter weight: 0.011, IoU: 0.331
+
+2021-03-22 07:43:35,310 - Model - INFO - Eval mean loss: 1.217973
+2021-03-22 07:43:35,310 - Model - INFO - Eval accuracy: 0.776619
+2021-03-22 07:43:35,310 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 07:43:35,310 - Model - INFO - **** Epoch 106 (106/128) ****
+2021-03-22 07:43:35,310 - Model - INFO - Learning rate:0.000028
+2021-03-22 07:50:45,006 - Model - INFO - Training mean loss: 0.133934
+2021-03-22 07:50:45,006 - Model - INFO - Training accuracy: 0.954460
+2021-03-22 07:50:45,006 - Model - INFO - Save model...
+2021-03-22 07:50:45,006 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-22 07:50:45,139 - Model - INFO - Saving model....
+2021-03-22 07:50:45,560 - Model - INFO - ---- EPOCH 106 EVALUATION ----
+2021-03-22 07:52:31,672 - Model - INFO - eval mean loss: 1.219044
+2021-03-22 07:52:31,672 - Model - INFO - eval point avg class IoU: 0.422020
+2021-03-22 07:52:31,672 - Model - INFO - eval point accuracy: 0.777981
+2021-03-22 07:52:31,673 - Model - INFO - eval point avg class acc: 0.524623
+2021-03-22 07:52:31,673 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.863
+class floor weight: 0.200, IoU: 0.965
+class wall weight: 0.166, IoU: 0.712
+class beam weight: 0.280, IoU: 0.000
+class column weight: 0.000, IoU: 0.102
+class window weight: 0.018, IoU: 0.521
+class door weight: 0.035, IoU: 0.123
+class table weight: 0.030, IoU: 0.551
+class chair weight: 0.039, IoU: 0.420
+class sofa weight: 0.019, IoU: 0.057
+class bookcase weight: 0.003, IoU: 0.476
+class board weight: 0.107, IoU: 0.352
+class clutter weight: 0.012, IoU: 0.343
+
+2021-03-22 07:52:31,673 - Model - INFO - Eval mean loss: 1.219044
+2021-03-22 07:52:31,673 - Model - INFO - Eval accuracy: 0.777981
+2021-03-22 07:52:31,673 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 07:52:31,673 - Model - INFO - **** Epoch 107 (107/128) ****
+2021-03-22 07:52:31,673 - Model - INFO - Learning rate:0.000028
+2021-03-22 07:59:49,691 - Model - INFO - Training mean loss: 0.133589
+2021-03-22 07:59:49,691 - Model - INFO - Training accuracy: 0.954437
+2021-03-22 07:59:50,228 - Model - INFO - ---- EPOCH 107 EVALUATION ----
+2021-03-22 08:01:49,854 - Model - INFO - eval mean loss: 1.234162
+2021-03-22 08:01:49,855 - Model - INFO - eval point avg class IoU: 0.424746
+2021-03-22 08:01:49,855 - Model - INFO - eval point accuracy: 0.779203
+2021-03-22 08:01:49,855 - Model - INFO - eval point avg class acc: 0.529854
+2021-03-22 08:01:49,855 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.868
+class floor weight: 0.200, IoU: 0.971
+class wall weight: 0.167, IoU: 0.715
+class beam weight: 0.281, IoU: 0.001
+class column weight: 0.000, IoU: 0.087
+class window weight: 0.017, IoU: 0.539
+class door weight: 0.033, IoU: 0.141
+class table weight: 0.031, IoU: 0.534
+class chair weight: 0.039, IoU: 0.430
+class sofa weight: 0.019, IoU: 0.091
+class bookcase weight: 0.003, IoU: 0.472
+class board weight: 0.108, IoU: 0.330
+class clutter weight: 0.012, IoU: 0.341
+
+2021-03-22 08:01:49,857 - Model - INFO - Eval mean loss: 1.234162
+2021-03-22 08:01:49,857 - Model - INFO - Eval accuracy: 0.779203
+2021-03-22 08:01:49,857 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 08:01:49,857 - Model - INFO - **** Epoch 108 (108/128) ****
+2021-03-22 08:01:49,857 - Model - INFO - Learning rate:0.000028
+2021-03-22 08:09:06,041 - Model - INFO - Training mean loss: 0.134170
+2021-03-22 08:09:06,041 - Model - INFO - Training accuracy: 0.954284
+2021-03-22 08:09:06,545 - Model - INFO - ---- EPOCH 108 EVALUATION ----
+2021-03-22 08:11:11,478 - Model - INFO - eval mean loss: 1.219521
+2021-03-22 08:11:11,479 - Model - INFO - eval point avg class IoU: 0.424851
+2021-03-22 08:11:11,479 - Model - INFO - eval point accuracy: 0.777044
+2021-03-22 08:11:11,479 - Model - INFO - eval point avg class acc: 0.527426
+2021-03-22 08:11:11,479 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.860
+class floor weight: 0.201, IoU: 0.969
+class wall weight: 0.166, IoU: 0.712
+class beam weight: 0.278, IoU: 0.001
+class column weight: 0.000, IoU: 0.109
+class window weight: 0.019, IoU: 0.527
+class door weight: 0.035, IoU: 0.130
+class table weight: 0.029, IoU: 0.551
+class chair weight: 0.039, IoU: 0.424
+class sofa weight: 0.019, IoU: 0.093
+class bookcase weight: 0.002, IoU: 0.472
+class board weight: 0.109, IoU: 0.340
+class clutter weight: 0.011, IoU: 0.337
+
+2021-03-22 08:11:11,479 - Model - INFO - Eval mean loss: 1.219521
+2021-03-22 08:11:11,479 - Model - INFO - Eval accuracy: 0.777044
+2021-03-22 08:11:11,479 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 08:11:11,480 - Model - INFO - **** Epoch 109 (109/128) ****
+2021-03-22 08:11:11,480 - Model - INFO - Learning rate:0.000028
+2021-03-22 08:18:25,121 - Model - INFO - Training mean loss: 0.134315
+2021-03-22 08:18:25,121 - Model - INFO - Training accuracy: 0.954280
+2021-03-22 08:18:25,515 - Model - INFO - ---- EPOCH 109 EVALUATION ----
+2021-03-22 08:20:14,140 - Model - INFO - eval mean loss: 1.251083
+2021-03-22 08:20:14,140 - Model - INFO - eval point avg class IoU: 0.422672
+2021-03-22 08:20:14,140 - Model - INFO - eval point accuracy: 0.772989
+2021-03-22 08:20:14,140 - Model - INFO - eval point avg class acc: 0.528567
+2021-03-22 08:20:14,140 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.856
+class floor weight: 0.200, IoU: 0.967
+class wall weight: 0.168, IoU: 0.704
+class beam weight: 0.278, IoU: 0.001
+class column weight: 0.000, IoU: 0.119
+class window weight: 0.018, IoU: 0.524
+class door weight: 0.034, IoU: 0.135
+class table weight: 0.029, IoU: 0.556
+class chair weight: 0.039, IoU: 0.419
+class sofa weight: 0.019, IoU: 0.059
+class bookcase weight: 0.003, IoU: 0.476
+class board weight: 0.109, IoU: 0.350
+class clutter weight: 0.012, IoU: 0.330
+
+2021-03-22 08:20:14,142 - Model - INFO - Eval mean loss: 1.251083
+2021-03-22 08:20:14,142 - Model - INFO - Eval accuracy: 0.772989
+2021-03-22 08:20:14,142 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 08:20:14,142 - Model - INFO - **** Epoch 110 (110/128) ****
+2021-03-22 08:20:14,142 - Model - INFO - Learning rate:0.000028
+2021-03-22 08:27:27,829 - Model - INFO - Training mean loss: 0.133252
+2021-03-22 08:27:27,830 - Model - INFO - Training accuracy: 0.954611
+2021-03-22 08:27:28,279 - Model - INFO - ---- EPOCH 110 EVALUATION ----
+2021-03-22 08:29:13,055 - Model - INFO - eval mean loss: 1.229990
+2021-03-22 08:29:13,055 - Model - INFO - eval point avg class IoU: 0.423034
+2021-03-22 08:29:13,055 - Model - INFO - eval point accuracy: 0.780021
+2021-03-22 08:29:13,056 - Model - INFO - eval point avg class acc: 0.526156
+2021-03-22 08:29:13,056 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.866
+class floor weight: 0.203, IoU: 0.969
+class wall weight: 0.170, IoU: 0.709
+class beam weight: 0.274, IoU: 0.001
+class column weight: 0.000, IoU: 0.089
+class window weight: 0.019, IoU: 0.538
+class door weight: 0.033, IoU: 0.143
+class table weight: 0.030, IoU: 0.546
+class chair weight: 0.039, IoU: 0.421
+class sofa weight: 0.019, IoU: 0.048
+class bookcase weight: 0.003, IoU: 0.478
+class board weight: 0.109, IoU: 0.354
+class clutter weight: 0.011, IoU: 0.338
+
+2021-03-22 08:29:13,056 - Model - INFO - Eval mean loss: 1.229990
+2021-03-22 08:29:13,056 - Model - INFO - Eval accuracy: 0.780021
+2021-03-22 08:29:13,056 - Model - INFO - Best mIoU: 0.431278
+2021-03-22 08:29:13,056 - Model - INFO - **** Epoch 111 (111/128) ****
+2021-03-22 08:29:13,056 - Model - INFO - Learning rate:0.000020
+2021-03-22 08:36:29,197 - Model - INFO - Training mean loss: 0.131785
+2021-03-22 08:36:29,198 - Model - INFO - Training accuracy: 0.955059
+2021-03-22 08:36:29,198 - Model - INFO - Save model...
+2021-03-22 08:36:29,198 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-22 08:36:29,368 - Model - INFO - Saving model....
+2021-03-22 08:36:29,893 - Model - INFO - ---- EPOCH 111 EVALUATION ----
+2021-03-22 08:38:11,304 - Model - INFO - eval mean loss: 1.177328
+2021-03-22 08:38:11,305 - Model - INFO - eval point avg class IoU: 0.431862
+2021-03-22 08:38:11,305 - Model - INFO - eval point accuracy: 0.784942
+2021-03-22 08:38:11,305 - Model - INFO - eval point avg class acc: 0.533072
+2021-03-22 08:38:11,305 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.865
+class floor weight: 0.202, IoU: 0.969
+class wall weight: 0.167, IoU: 0.723
+class beam weight: 0.280, IoU: 0.001
+class column weight: 0.000, IoU: 0.107
+class window weight: 0.017, IoU: 0.558
+class door weight: 0.034, IoU: 0.143
+class table weight: 0.031, IoU: 0.555
+class chair weight: 0.038, IoU: 0.442
+class sofa weight: 0.019, IoU: 0.063
+class bookcase weight: 0.003, IoU: 0.485
+class board weight: 0.107, IoU: 0.355
+class clutter weight: 0.012, IoU: 0.347
+
+2021-03-22 08:38:11,305 - Model - INFO - Eval mean loss: 1.177328
+2021-03-22 08:38:11,305 - Model - INFO - Eval accuracy: 0.784942
+2021-03-22 08:38:11,305 - Model - INFO - Save model...
+2021-03-22 08:38:11,306 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/best_model.pth
+2021-03-22 08:38:11,497 - Model - INFO - Saving model....
+2021-03-22 08:38:11,497 - Model - INFO - Best mIoU: 0.431862
+2021-03-22 08:38:11,497 - Model - INFO - **** Epoch 112 (112/128) ****
+2021-03-22 08:38:11,497 - Model - INFO - Learning rate:0.000020
+2021-03-22 08:45:18,358 - Model - INFO - Training mean loss: 0.131721
+2021-03-22 08:45:18,358 - Model - INFO - Training accuracy: 0.955167
+2021-03-22 08:45:18,791 - Model - INFO - ---- EPOCH 112 EVALUATION ----
+2021-03-22 08:46:57,223 - Model - INFO - eval mean loss: 1.217327
+2021-03-22 08:46:57,223 - Model - INFO - eval point avg class IoU: 0.426940
+2021-03-22 08:46:57,223 - Model - INFO - eval point accuracy: 0.779434
+2021-03-22 08:46:57,224 - Model - INFO - eval point avg class acc: 0.531377
+2021-03-22 08:46:57,224 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.093, IoU: 0.857
+class floor weight: 0.200, IoU: 0.972
+class wall weight: 0.167, IoU: 0.715
+class beam weight: 0.279, IoU: 0.001
+class column weight: 0.000, IoU: 0.105
+class window weight: 0.019, IoU: 0.539
+class door weight: 0.032, IoU: 0.145
+class table weight: 0.029, IoU: 0.554
+class chair weight: 0.039, IoU: 0.424
+class sofa weight: 0.018, IoU: 0.058
+class bookcase weight: 0.003, IoU: 0.478
+class board weight: 0.108, IoU: 0.354
+class clutter weight: 0.011, IoU: 0.349
+
+2021-03-22 08:46:57,224 - Model - INFO - Eval mean loss: 1.217327
+2021-03-22 08:46:57,224 - Model - INFO - Eval accuracy: 0.779434
+2021-03-22 08:46:57,224 - Model - INFO - Best mIoU: 0.431862
+2021-03-22 08:46:57,224 - Model - INFO - **** Epoch 113 (113/128) ****
+2021-03-22 08:46:57,224 - Model - INFO - Learning rate:0.000020
+2021-03-22 08:54:05,138 - Model - INFO - Training mean loss: 0.131188
+2021-03-22 08:54:05,138 - Model - INFO - Training accuracy: 0.955339
+2021-03-22 08:54:05,715 - Model - INFO - ---- EPOCH 113 EVALUATION ----
+2021-03-22 08:55:45,645 - Model - INFO - eval mean loss: 1.226141
+2021-03-22 08:55:45,646 - Model - INFO - eval point avg class IoU: 0.424434
+2021-03-22 08:55:45,646 - Model - INFO - eval point accuracy: 0.779908
+2021-03-22 08:55:45,646 - Model - INFO - eval point avg class acc: 0.526986
+2021-03-22 08:55:45,646 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.860
+class floor weight: 0.202, IoU: 0.967
+class wall weight: 0.169, IoU: 0.716
+class beam weight: 0.275, IoU: 0.001
+class column weight: 0.000, IoU: 0.090
+class window weight: 0.018, IoU: 0.553
+class door weight: 0.034, IoU: 0.146
+class table weight: 0.031, IoU: 0.555
+class chair weight: 0.040, IoU: 0.418
+class sofa weight: 0.018, IoU: 0.051
+class bookcase weight: 0.003, IoU: 0.460
+class board weight: 0.107, IoU: 0.356
+class clutter weight: 0.011, IoU: 0.345
+
+2021-03-22 08:55:45,647 - Model - INFO - Eval mean loss: 1.226141
+2021-03-22 08:55:45,647 - Model - INFO - Eval accuracy: 0.779908
+2021-03-22 08:55:45,647 - Model - INFO - Best mIoU: 0.431862
+2021-03-22 08:55:45,647 - Model - INFO - **** Epoch 114 (114/128) ****
+2021-03-22 08:55:45,647 - Model - INFO - Learning rate:0.000020
+2021-03-22 09:02:55,003 - Model - INFO - Training mean loss: 0.132045
+2021-03-22 09:02:55,003 - Model - INFO - Training accuracy: 0.955061
+2021-03-22 09:02:55,419 - Model - INFO - ---- EPOCH 114 EVALUATION ----
+2021-03-22 09:04:36,419 - Model - INFO - eval mean loss: 1.245777
+2021-03-22 09:04:36,419 - Model - INFO - eval point avg class IoU: 0.422650
+2021-03-22 09:04:36,419 - Model - INFO - eval point accuracy: 0.777055
+2021-03-22 09:04:36,419 - Model - INFO - eval point avg class acc: 0.523362
+2021-03-22 09:04:36,420 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.847
+class floor weight: 0.201, IoU: 0.972
+class wall weight: 0.168, IoU: 0.716
+class beam weight: 0.278, IoU: 0.000
+class column weight: 0.000, IoU: 0.080
+class window weight: 0.016, IoU: 0.555
+class door weight: 0.033, IoU: 0.140
+class table weight: 0.030, IoU: 0.559
+class chair weight: 0.040, IoU: 0.441
+class sofa weight: 0.019, IoU: 0.035
+class bookcase weight: 0.003, IoU: 0.466
+class board weight: 0.108, IoU: 0.346
+class clutter weight: 0.012, IoU: 0.337
+
+2021-03-22 09:04:36,420 - Model - INFO - Eval mean loss: 1.245777
+2021-03-22 09:04:36,420 - Model - INFO - Eval accuracy: 0.777055
+2021-03-22 09:04:36,420 - Model - INFO - Best mIoU: 0.431862
+2021-03-22 09:04:36,421 - Model - INFO - **** Epoch 115 (115/128) ****
+2021-03-22 09:04:36,421 - Model - INFO - Learning rate:0.000020
+2021-03-22 09:11:44,137 - Model - INFO - Training mean loss: 0.132276
+2021-03-22 09:11:44,137 - Model - INFO - Training accuracy: 0.955034
+2021-03-22 09:11:44,584 - Model - INFO - ---- EPOCH 115 EVALUATION ----
+2021-03-22 09:13:22,578 - Model - INFO - eval mean loss: 1.236688
+2021-03-22 09:13:22,578 - Model - INFO - eval point avg class IoU: 0.426845
+2021-03-22 09:13:22,583 - Model - INFO - eval point accuracy: 0.781015
+2021-03-22 09:13:22,583 - Model - INFO - eval point avg class acc: 0.527974
+2021-03-22 09:13:22,583 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.857
+class floor weight: 0.201, IoU: 0.976
+class wall weight: 0.166, IoU: 0.716
+class beam weight: 0.281, IoU: 0.001
+class column weight: 0.000, IoU: 0.098
+class window weight: 0.019, IoU: 0.571
+class door weight: 0.033, IoU: 0.145
+class table weight: 0.029, IoU: 0.542
+class chair weight: 0.038, IoU: 0.431
+class sofa weight: 0.018, IoU: 0.046
+class bookcase weight: 0.003, IoU: 0.484
+class board weight: 0.110, IoU: 0.348
+class clutter weight: 0.011, IoU: 0.335
+
+2021-03-22 09:13:22,583 - Model - INFO - Eval mean loss: 1.236688
+2021-03-22 09:13:22,583 - Model - INFO - Eval accuracy: 0.781015
+2021-03-22 09:13:22,583 - Model - INFO - Best mIoU: 0.431862
+2021-03-22 09:13:22,583 - Model - INFO - **** Epoch 116 (116/128) ****
+2021-03-22 09:13:22,583 - Model - INFO - Learning rate:0.000020
+2021-03-22 09:20:31,753 - Model - INFO - Training mean loss: 0.131654
+2021-03-22 09:20:31,753 - Model - INFO - Training accuracy: 0.955152
+2021-03-22 09:20:31,753 - Model - INFO - Save model...
+2021-03-22 09:20:31,754 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-22 09:20:31,889 - Model - INFO - Saving model....
+2021-03-22 09:20:32,394 - Model - INFO - ---- EPOCH 116 EVALUATION ----
+2021-03-22 09:22:12,787 - Model - INFO - eval mean loss: 1.208956
+2021-03-22 09:22:12,787 - Model - INFO - eval point avg class IoU: 0.426714
+2021-03-22 09:22:12,787 - Model - INFO - eval point accuracy: 0.779970
+2021-03-22 09:22:12,787 - Model - INFO - eval point avg class acc: 0.530809
+2021-03-22 09:22:12,788 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.861
+class floor weight: 0.200, IoU: 0.970
+class wall weight: 0.168, IoU: 0.715
+class beam weight: 0.280, IoU: 0.001
+class column weight: 0.000, IoU: 0.091
+class window weight: 0.018, IoU: 0.541
+class door weight: 0.034, IoU: 0.164
+class table weight: 0.029, IoU: 0.549
+class chair weight: 0.039, IoU: 0.436
+class sofa weight: 0.019, IoU: 0.051
+class bookcase weight: 0.003, IoU: 0.473
+class board weight: 0.107, IoU: 0.359
+class clutter weight: 0.012, IoU: 0.336
+
+2021-03-22 09:22:12,788 - Model - INFO - Eval mean loss: 1.208956
+2021-03-22 09:22:12,788 - Model - INFO - Eval accuracy: 0.779970
+2021-03-22 09:22:12,788 - Model - INFO - Best mIoU: 0.431862
+2021-03-22 09:22:12,788 - Model - INFO - **** Epoch 117 (117/128) ****
+2021-03-22 09:22:12,788 - Model - INFO - Learning rate:0.000020
+2021-03-22 09:29:23,916 - Model - INFO - Training mean loss: 0.131296
+2021-03-22 09:29:23,916 - Model - INFO - Training accuracy: 0.955186
+2021-03-22 09:29:24,305 - Model - INFO - ---- EPOCH 117 EVALUATION ----
+2021-03-22 09:31:03,509 - Model - INFO - eval mean loss: 1.202740
+2021-03-22 09:31:03,510 - Model - INFO - eval point avg class IoU: 0.428608
+2021-03-22 09:31:03,510 - Model - INFO - eval point accuracy: 0.779866
+2021-03-22 09:31:03,510 - Model - INFO - eval point avg class acc: 0.534862
+2021-03-22 09:31:03,511 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.853
+class floor weight: 0.201, IoU: 0.969
+class wall weight: 0.168, IoU: 0.716
+class beam weight: 0.281, IoU: 0.002
+class column weight: 0.000, IoU: 0.105
+class window weight: 0.017, IoU: 0.546
+class door weight: 0.032, IoU: 0.145
+class table weight: 0.030, IoU: 0.548
+class chair weight: 0.038, IoU: 0.435
+class sofa weight: 0.019, IoU: 0.074
+class bookcase weight: 0.002, IoU: 0.475
+class board weight: 0.110, IoU: 0.366
+class clutter weight: 0.012, IoU: 0.337
+
+2021-03-22 09:31:03,511 - Model - INFO - Eval mean loss: 1.202740
+2021-03-22 09:31:03,511 - Model - INFO - Eval accuracy: 0.779866
+2021-03-22 09:31:03,511 - Model - INFO - Best mIoU: 0.431862
+2021-03-22 09:31:03,511 - Model - INFO - **** Epoch 118 (118/128) ****
+2021-03-22 09:31:03,511 - Model - INFO - Learning rate:0.000020
+2021-03-22 09:38:09,953 - Model - INFO - Training mean loss: 0.131856
+2021-03-22 09:38:09,954 - Model - INFO - Training accuracy: 0.955023
+2021-03-22 09:38:10,362 - Model - INFO - ---- EPOCH 118 EVALUATION ----
+2021-03-22 09:39:49,362 - Model - INFO - eval mean loss: 1.219308
+2021-03-22 09:39:49,362 - Model - INFO - eval point avg class IoU: 0.427407
+2021-03-22 09:39:49,362 - Model - INFO - eval point accuracy: 0.782402
+2021-03-22 09:39:49,362 - Model - INFO - eval point avg class acc: 0.530305
+2021-03-22 09:39:49,363 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.860
+class floor weight: 0.201, IoU: 0.972
+class wall weight: 0.168, IoU: 0.719
+class beam weight: 0.278, IoU: 0.001
+class column weight: 0.000, IoU: 0.087
+class window weight: 0.018, IoU: 0.557
+class door weight: 0.032, IoU: 0.156
+class table weight: 0.030, IoU: 0.557
+class chair weight: 0.040, IoU: 0.414
+class sofa weight: 0.019, IoU: 0.082
+class bookcase weight: 0.003, IoU: 0.475
+class board weight: 0.109, IoU: 0.332
+class clutter weight: 0.012, IoU: 0.344
+
+2021-03-22 09:39:49,363 - Model - INFO - Eval mean loss: 1.219308
+2021-03-22 09:39:49,364 - Model - INFO - Eval accuracy: 0.782402
+2021-03-22 09:39:49,364 - Model - INFO - Best mIoU: 0.431862
+2021-03-22 09:39:49,364 - Model - INFO - **** Epoch 119 (119/128) ****
+2021-03-22 09:39:49,364 - Model - INFO - Learning rate:0.000020
+2021-03-22 09:46:59,046 - Model - INFO - Training mean loss: 0.131786
+2021-03-22 09:46:59,046 - Model - INFO - Training accuracy: 0.955059
+2021-03-22 09:46:59,458 - Model - INFO - ---- EPOCH 119 EVALUATION ----
+2021-03-22 09:48:40,363 - Model - INFO - eval mean loss: 1.229583
+2021-03-22 09:48:40,363 - Model - INFO - eval point avg class IoU: 0.423862
+2021-03-22 09:48:40,363 - Model - INFO - eval point accuracy: 0.779721
+2021-03-22 09:48:40,363 - Model - INFO - eval point avg class acc: 0.526879
+2021-03-22 09:48:40,364 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.862
+class floor weight: 0.199, IoU: 0.968
+class wall weight: 0.166, IoU: 0.718
+class beam weight: 0.286, IoU: 0.001
+class column weight: 0.000, IoU: 0.093
+class window weight: 0.018, IoU: 0.544
+class door weight: 0.032, IoU: 0.146
+class table weight: 0.030, IoU: 0.551
+class chair weight: 0.038, IoU: 0.424
+class sofa weight: 0.018, IoU: 0.068
+class bookcase weight: 0.002, IoU: 0.467
+class board weight: 0.108, IoU: 0.331
+class clutter weight: 0.012, IoU: 0.337
+
+2021-03-22 09:48:40,364 - Model - INFO - Eval mean loss: 1.229583
+2021-03-22 09:48:40,364 - Model - INFO - Eval accuracy: 0.779721
+2021-03-22 09:48:40,364 - Model - INFO - Best mIoU: 0.431862
+2021-03-22 09:48:40,364 - Model - INFO - **** Epoch 120 (120/128) ****
+2021-03-22 09:48:40,364 - Model - INFO - Learning rate:0.000020
+2021-03-22 09:55:51,759 - Model - INFO - Training mean loss: 0.131215
+2021-03-22 09:55:51,759 - Model - INFO - Training accuracy: 0.955402
+2021-03-22 09:55:52,214 - Model - INFO - ---- EPOCH 120 EVALUATION ----
+2021-03-22 09:57:35,222 - Model - INFO - eval mean loss: 1.241015
+2021-03-22 09:57:35,222 - Model - INFO - eval point avg class IoU: 0.423199
+2021-03-22 09:57:35,222 - Model - INFO - eval point accuracy: 0.776063
+2021-03-22 09:57:35,222 - Model - INFO - eval point avg class acc: 0.521099
+2021-03-22 09:57:35,223 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.846
+class floor weight: 0.201, IoU: 0.972
+class wall weight: 0.169, IoU: 0.719
+class beam weight: 0.280, IoU: 0.001
+class column weight: 0.000, IoU: 0.109
+class window weight: 0.017, IoU: 0.520
+class door weight: 0.031, IoU: 0.129
+class table weight: 0.030, IoU: 0.550
+class chair weight: 0.039, IoU: 0.434
+class sofa weight: 0.019, IoU: 0.072
+class bookcase weight: 0.002, IoU: 0.484
+class board weight: 0.110, IoU: 0.340
+class clutter weight: 0.011, IoU: 0.327
+
+2021-03-22 09:57:35,223 - Model - INFO - Eval mean loss: 1.241015
+2021-03-22 09:57:35,223 - Model - INFO - Eval accuracy: 0.776063
+2021-03-22 09:57:35,223 - Model - INFO - Best mIoU: 0.431862
+2021-03-22 09:57:35,223 - Model - INFO - **** Epoch 121 (121/128) ****
+2021-03-22 09:57:35,223 - Model - INFO - Learning rate:0.000014
+2021-03-22 10:04:46,459 - Model - INFO - Training mean loss: 0.130260
+2021-03-22 10:04:46,459 - Model - INFO - Training accuracy: 0.955481
+2021-03-22 10:04:46,459 - Model - INFO - Save model...
+2021-03-22 10:04:46,459 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-22 10:04:46,597 - Model - INFO - Saving model....
+2021-03-22 10:04:47,086 - Model - INFO - ---- EPOCH 121 EVALUATION ----
+2021-03-22 10:06:41,210 - Model - INFO - eval mean loss: 1.201745
+2021-03-22 10:06:41,211 - Model - INFO - eval point avg class IoU: 0.426170
+2021-03-22 10:06:41,211 - Model - INFO - eval point accuracy: 0.781765
+2021-03-22 10:06:41,211 - Model - INFO - eval point avg class acc: 0.526151
+2021-03-22 10:06:41,211 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.864
+class floor weight: 0.199, IoU: 0.971
+class wall weight: 0.166, IoU: 0.717
+class beam weight: 0.282, IoU: 0.001
+class column weight: 0.000, IoU: 0.120
+class window weight: 0.018, IoU: 0.551
+class door weight: 0.033, IoU: 0.139
+class table weight: 0.029, IoU: 0.543
+class chair weight: 0.038, IoU: 0.431
+class sofa weight: 0.019, IoU: 0.048
+class bookcase weight: 0.003, IoU: 0.474
+class board weight: 0.109, IoU: 0.339
+class clutter weight: 0.011, IoU: 0.344
+
+2021-03-22 10:06:41,211 - Model - INFO - Eval mean loss: 1.201745
+2021-03-22 10:06:41,211 - Model - INFO - Eval accuracy: 0.781765
+2021-03-22 10:06:41,211 - Model - INFO - Best mIoU: 0.431862
+2021-03-22 10:06:41,211 - Model - INFO - **** Epoch 122 (122/128) ****
+2021-03-22 10:06:41,212 - Model - INFO - Learning rate:0.000014
+2021-03-22 10:13:50,319 - Model - INFO - Training mean loss: 0.131934
+2021-03-22 10:13:50,319 - Model - INFO - Training accuracy: 0.954995
+2021-03-22 10:13:50,733 - Model - INFO - ---- EPOCH 122 EVALUATION ----
+2021-03-22 10:15:31,617 - Model - INFO - eval mean loss: 1.239823
+2021-03-22 10:15:31,617 - Model - INFO - eval point avg class IoU: 0.424403
+2021-03-22 10:15:31,617 - Model - INFO - eval point accuracy: 0.779590
+2021-03-22 10:15:31,617 - Model - INFO - eval point avg class acc: 0.525745
+2021-03-22 10:15:31,618 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.090, IoU: 0.858
+class floor weight: 0.201, IoU: 0.971
+class wall weight: 0.167, IoU: 0.716
+class beam weight: 0.280, IoU: 0.001
+class column weight: 0.000, IoU: 0.100
+class window weight: 0.017, IoU: 0.559
+class door weight: 0.032, IoU: 0.134
+class table weight: 0.030, IoU: 0.550
+class chair weight: 0.039, IoU: 0.424
+class sofa weight: 0.018, IoU: 0.058
+class bookcase weight: 0.003, IoU: 0.472
+class board weight: 0.109, IoU: 0.335
+class clutter weight: 0.012, IoU: 0.340
+
+2021-03-22 10:15:31,618 - Model - INFO - Eval mean loss: 1.239823
+2021-03-22 10:15:31,618 - Model - INFO - Eval accuracy: 0.779590
+2021-03-22 10:15:31,618 - Model - INFO - Best mIoU: 0.431862
+2021-03-22 10:15:31,618 - Model - INFO - **** Epoch 123 (123/128) ****
+2021-03-22 10:15:31,618 - Model - INFO - Learning rate:0.000014
+2021-03-22 10:22:43,514 - Model - INFO - Training mean loss: 0.130276
+2021-03-22 10:22:43,519 - Model - INFO - Training accuracy: 0.955554
+2021-03-22 10:22:43,967 - Model - INFO - ---- EPOCH 123 EVALUATION ----
+2021-03-22 10:24:25,131 - Model - INFO - eval mean loss: 1.257447
+2021-03-22 10:24:25,131 - Model - INFO - eval point avg class IoU: 0.418816
+2021-03-22 10:24:25,131 - Model - INFO - eval point accuracy: 0.776030
+2021-03-22 10:24:25,131 - Model - INFO - eval point avg class acc: 0.523923
+2021-03-22 10:24:25,131 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.862
+class floor weight: 0.199, IoU: 0.971
+class wall weight: 0.166, IoU: 0.703
+class beam weight: 0.281, IoU: 0.001
+class column weight: 0.000, IoU: 0.089
+class window weight: 0.018, IoU: 0.525
+class door weight: 0.036, IoU: 0.125
+class table weight: 0.031, IoU: 0.544
+class chair weight: 0.039, IoU: 0.417
+class sofa weight: 0.018, IoU: 0.046
+class bookcase weight: 0.003, IoU: 0.476
+class board weight: 0.108, IoU: 0.345
+class clutter weight: 0.011, IoU: 0.340
+
+2021-03-22 10:24:25,131 - Model - INFO - Eval mean loss: 1.257447
+2021-03-22 10:24:25,132 - Model - INFO - Eval accuracy: 0.776030
+2021-03-22 10:24:25,132 - Model - INFO - Best mIoU: 0.431862
+2021-03-22 10:24:25,132 - Model - INFO - **** Epoch 124 (124/128) ****
+2021-03-22 10:24:25,132 - Model - INFO - Learning rate:0.000014
+2021-03-22 10:31:32,449 - Model - INFO - Training mean loss: 0.129868
+2021-03-22 10:31:32,449 - Model - INFO - Training accuracy: 0.955850
+2021-03-22 10:31:32,842 - Model - INFO - ---- EPOCH 124 EVALUATION ----
+2021-03-22 10:33:10,242 - Model - INFO - eval mean loss: 1.213743
+2021-03-22 10:33:10,242 - Model - INFO - eval point avg class IoU: 0.425238
+2021-03-22 10:33:10,242 - Model - INFO - eval point accuracy: 0.778463
+2021-03-22 10:33:10,242 - Model - INFO - eval point avg class acc: 0.530882
+2021-03-22 10:33:10,242 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.861
+class floor weight: 0.200, IoU: 0.973
+class wall weight: 0.167, IoU: 0.714
+class beam weight: 0.280, IoU: 0.000
+class column weight: 0.000, IoU: 0.094
+class window weight: 0.018, IoU: 0.567
+class door weight: 0.032, IoU: 0.157
+class table weight: 0.031, IoU: 0.547
+class chair weight: 0.039, IoU: 0.431
+class sofa weight: 0.019, IoU: 0.049
+class bookcase weight: 0.003, IoU: 0.469
+class board weight: 0.110, IoU: 0.326
+class clutter weight: 0.011, IoU: 0.341
+
+2021-03-22 10:33:10,244 - Model - INFO - Eval mean loss: 1.213743
+2021-03-22 10:33:10,244 - Model - INFO - Eval accuracy: 0.778463
+2021-03-22 10:33:10,244 - Model - INFO - Best mIoU: 0.431862
+2021-03-22 10:33:10,244 - Model - INFO - **** Epoch 125 (125/128) ****
+2021-03-22 10:33:10,244 - Model - INFO - Learning rate:0.000014
+2021-03-22 10:40:14,778 - Model - INFO - Training mean loss: 0.129693
+2021-03-22 10:40:14,779 - Model - INFO - Training accuracy: 0.955758
+2021-03-22 10:40:15,212 - Model - INFO - ---- EPOCH 125 EVALUATION ----
+2021-03-22 10:41:58,011 - Model - INFO - eval mean loss: 1.223198
+2021-03-22 10:41:58,012 - Model - INFO - eval point avg class IoU: 0.422055
+2021-03-22 10:41:58,012 - Model - INFO - eval point accuracy: 0.777521
+2021-03-22 10:41:58,012 - Model - INFO - eval point avg class acc: 0.528173
+2021-03-22 10:41:58,012 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.092, IoU: 0.859
+class floor weight: 0.201, IoU: 0.972
+class wall weight: 0.168, IoU: 0.708
+class beam weight: 0.279, IoU: 0.001
+class column weight: 0.000, IoU: 0.098
+class window weight: 0.018, IoU: 0.532
+class door weight: 0.034, IoU: 0.151
+class table weight: 0.029, IoU: 0.551
+class chair weight: 0.039, IoU: 0.418
+class sofa weight: 0.018, IoU: 0.056
+class bookcase weight: 0.003, IoU: 0.472
+class board weight: 0.108, IoU: 0.325
+class clutter weight: 0.011, IoU: 0.344
+
+2021-03-22 10:41:58,012 - Model - INFO - Eval mean loss: 1.223198
+2021-03-22 10:41:58,012 - Model - INFO - Eval accuracy: 0.777521
+2021-03-22 10:41:58,013 - Model - INFO - Best mIoU: 0.431862
+2021-03-22 10:41:58,013 - Model - INFO - **** Epoch 126 (126/128) ****
+2021-03-22 10:41:58,013 - Model - INFO - Learning rate:0.000014
+2021-03-22 10:49:07,939 - Model - INFO - Training mean loss: 0.129827
+2021-03-22 10:49:07,939 - Model - INFO - Training accuracy: 0.955751
+2021-03-22 10:49:07,940 - Model - INFO - Save model...
+2021-03-22 10:49:07,940 - Model - INFO - Saving at log/sem_seg/pointnet_sem_seg/checkpoints/model.pth
+2021-03-22 10:49:08,270 - Model - INFO - Saving model....
+2021-03-22 10:49:08,685 - Model - INFO - ---- EPOCH 126 EVALUATION ----
+2021-03-22 10:50:55,660 - Model - INFO - eval mean loss: 1.205550
+2021-03-22 10:50:55,660 - Model - INFO - eval point avg class IoU: 0.423512
+2021-03-22 10:50:55,660 - Model - INFO - eval point accuracy: 0.777151
+2021-03-22 10:50:55,661 - Model - INFO - eval point avg class acc: 0.532734
+2021-03-22 10:50:55,661 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.852
+class floor weight: 0.201, IoU: 0.973
+class wall weight: 0.169, IoU: 0.712
+class beam weight: 0.280, IoU: 0.001
+class column weight: 0.000, IoU: 0.101
+class window weight: 0.017, IoU: 0.538
+class door weight: 0.032, IoU: 0.142
+class table weight: 0.031, IoU: 0.559
+class chair weight: 0.039, IoU: 0.444
+class sofa weight: 0.020, IoU: 0.044
+class bookcase weight: 0.003, IoU: 0.465
+class board weight: 0.106, IoU: 0.329
+class clutter weight: 0.011, IoU: 0.345
+
+2021-03-22 10:50:55,661 - Model - INFO - Eval mean loss: 1.205550
+2021-03-22 10:50:55,661 - Model - INFO - Eval accuracy: 0.777151
+2021-03-22 10:50:55,661 - Model - INFO - Best mIoU: 0.431862
+2021-03-22 10:50:55,661 - Model - INFO - **** Epoch 127 (127/128) ****
+2021-03-22 10:50:55,661 - Model - INFO - Learning rate:0.000014
+2021-03-22 10:58:02,163 - Model - INFO - Training mean loss: 0.130021
+2021-03-22 10:58:02,164 - Model - INFO - Training accuracy: 0.955682
+2021-03-22 10:58:02,620 - Model - INFO - ---- EPOCH 127 EVALUATION ----
+2021-03-22 10:59:43,826 - Model - INFO - eval mean loss: 1.251762
+2021-03-22 10:59:43,827 - Model - INFO - eval point avg class IoU: 0.423714
+2021-03-22 10:59:43,827 - Model - INFO - eval point accuracy: 0.777038
+2021-03-22 10:59:43,827 - Model - INFO - eval point avg class acc: 0.525790
+2021-03-22 10:59:43,827 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.853
+class floor weight: 0.199, IoU: 0.969
+class wall weight: 0.166, IoU: 0.711
+class beam weight: 0.282, IoU: 0.001
+class column weight: 0.000, IoU: 0.089
+class window weight: 0.019, IoU: 0.550
+class door weight: 0.034, IoU: 0.139
+class table weight: 0.030, IoU: 0.553
+class chair weight: 0.039, IoU: 0.432
+class sofa weight: 0.018, IoU: 0.060
+class bookcase weight: 0.003, IoU: 0.464
+class board weight: 0.108, IoU: 0.347
+class clutter weight: 0.011, IoU: 0.339
+
+2021-03-22 10:59:43,827 - Model - INFO - Eval mean loss: 1.251762
+2021-03-22 10:59:43,828 - Model - INFO - Eval accuracy: 0.777038
+2021-03-22 10:59:43,828 - Model - INFO - Best mIoU: 0.431862
+2021-03-22 10:59:43,828 - Model - INFO - **** Epoch 128 (128/128) ****
+2021-03-22 10:59:43,828 - Model - INFO - Learning rate:0.000014
+2021-03-22 11:06:50,928 - Model - INFO - Training mean loss: 0.129813
+2021-03-22 11:06:50,928 - Model - INFO - Training accuracy: 0.955790
+2021-03-22 11:06:51,356 - Model - INFO - ---- EPOCH 128 EVALUATION ----
+2021-03-22 11:08:30,698 - Model - INFO - eval mean loss: 1.252922
+2021-03-22 11:08:30,698 - Model - INFO - eval point avg class IoU: 0.425719
+2021-03-22 11:08:30,698 - Model - INFO - eval point accuracy: 0.778168
+2021-03-22 11:08:30,698 - Model - INFO - eval point avg class acc: 0.525732
+2021-03-22 11:08:30,698 - Model - INFO - ------- IoU --------
+class ceiling weight: 0.091, IoU: 0.857
+class floor weight: 0.201, IoU: 0.970
+class wall weight: 0.169, IoU: 0.709
+class beam weight: 0.278, IoU: 0.001
+class column weight: 0.000, IoU: 0.091
+class window weight: 0.018, IoU: 0.547
+class door weight: 0.032, IoU: 0.160
+class table weight: 0.031, IoU: 0.549
+class chair weight: 0.038, IoU: 0.432
+class sofa weight: 0.019, IoU: 0.066
+class bookcase weight: 0.003, IoU: 0.466
+class board weight: 0.108, IoU: 0.346
+class clutter weight: 0.012, IoU: 0.340
+
+2021-03-22 11:08:30,699 - Model - INFO - Eval mean loss: 1.252922
+2021-03-22 11:08:30,699 - Model - INFO - Eval accuracy: 0.778168
+2021-03-22 11:08:30,699 - Model - INFO - Best mIoU: 0.431862
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet_sem_seg/pointnet2_utils.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet_sem_seg/pointnet2_utils.py
new file mode 100644
index 0000000000..4ef8763727
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet_sem_seg/pointnet2_utils.py
@@ -0,0 +1,316 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from time import time
+import numpy as np
+
+def timeit(tag, t):
+ print("{}: {}s".format(tag, time() - t))
+ return time()
+
+def pc_normalize(pc):
+ l = pc.shape[0]
+ centroid = np.mean(pc, axis=0)
+ pc = pc - centroid
+ m = np.max(np.sqrt(np.sum(pc**2, axis=1)))
+ pc = pc / m
+ return pc
+
+def square_distance(src, dst):
+ """
+ Calculate Euclid distance between each two points.
+
+ src^T * dst = xn * xm + yn * ym + zn * zmï¼›
+ sum(src^2, dim=-1) = xn*xn + yn*yn + zn*zn;
+ sum(dst^2, dim=-1) = xm*xm + ym*ym + zm*zm;
+ dist = (xn - xm)^2 + (yn - ym)^2 + (zn - zm)^2
+ = sum(src**2,dim=-1)+sum(dst**2,dim=-1)-2*src^T*dst
+
+ Input:
+ src: source points, [B, N, C]
+ dst: target points, [B, M, C]
+ Output:
+ dist: per-point square distance, [B, N, M]
+ """
+ B, N, _ = src.shape
+ _, M, _ = dst.shape
+ dist = -2 * torch.matmul(src, dst.permute(0, 2, 1))
+ dist += torch.sum(src ** 2, -1).view(B, N, 1)
+ dist += torch.sum(dst ** 2, -1).view(B, 1, M)
+ return dist
+
+
+def index_points(points, idx):
+ """
+
+ Input:
+ points: input points data, [B, N, C]
+ idx: sample index data, [B, S]
+ Return:
+ new_points:, indexed points data, [B, S, C]
+ """
+ device = points.device
+ B = points.shape[0]
+ view_shape = list(idx.shape)
+ view_shape[1:] = [1] * (len(view_shape) - 1)
+ repeat_shape = list(idx.shape)
+ repeat_shape[0] = 1
+ batch_indices = torch.arange(B, dtype=torch.long).to(device).view(view_shape).repeat(repeat_shape)
+ new_points = points[batch_indices, idx, :]
+ return new_points
+
+
+def farthest_point_sample(xyz, npoint):
+ """
+ Input:
+ xyz: pointcloud data, [B, N, 3]
+ npoint: number of samples
+ Return:
+ centroids: sampled pointcloud index, [B, npoint]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ centroids = torch.zeros(B, npoint, dtype=torch.long).to(device)
+ distance = torch.ones(B, N).to(device) * 1e10
+ farthest = torch.randint(0, N, (B,), dtype=torch.long).to(device)
+ batch_indices = torch.arange(B, dtype=torch.long).to(device)
+ for i in range(npoint):
+ centroids[:, i] = farthest
+ centroid = xyz[batch_indices, farthest, :].view(B, 1, 3)
+ dist = torch.sum((xyz - centroid) ** 2, -1)
+ mask = dist < distance
+ distance[mask] = dist[mask]
+ farthest = torch.max(distance, -1)[1]
+ return centroids
+
+
+def query_ball_point(radius, nsample, xyz, new_xyz):
+ """
+ Input:
+ radius: local region radius
+ nsample: max sample number in local region
+ xyz: all points, [B, N, 3]
+ new_xyz: query points, [B, S, 3]
+ Return:
+ group_idx: grouped points index, [B, S, nsample]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ _, S, _ = new_xyz.shape
+ group_idx = torch.arange(N, dtype=torch.long).to(device).view(1, 1, N).repeat([B, S, 1])
+ sqrdists = square_distance(new_xyz, xyz)
+ group_idx[sqrdists > radius ** 2] = N
+ group_idx = group_idx.sort(dim=-1)[0][:, :, :nsample]
+ group_first = group_idx[:, :, 0].view(B, S, 1).repeat([1, 1, nsample])
+ mask = group_idx == N
+ group_idx[mask] = group_first[mask]
+ return group_idx
+
+
+def sample_and_group(npoint, radius, nsample, xyz, points, returnfps=False):
+ """
+ Input:
+ npoint:
+ radius:
+ nsample:
+ xyz: input points position data, [B, N, 3]
+ points: input points data, [B, N, D]
+ Return:
+ new_xyz: sampled points position data, [B, npoint, nsample, 3]
+ new_points: sampled points data, [B, npoint, nsample, 3+D]
+ """
+ B, N, C = xyz.shape
+ S = npoint
+ fps_idx = farthest_point_sample(xyz, npoint) # [B, npoint, C]
+ new_xyz = index_points(xyz, fps_idx)
+ idx = query_ball_point(radius, nsample, xyz, new_xyz)
+ grouped_xyz = index_points(xyz, idx) # [B, npoint, nsample, C]
+ grouped_xyz_norm = grouped_xyz - new_xyz.view(B, S, 1, C)
+
+ if points is not None:
+ grouped_points = index_points(points, idx)
+ new_points = torch.cat([grouped_xyz_norm, grouped_points], dim=-1) # [B, npoint, nsample, C+D]
+ else:
+ new_points = grouped_xyz_norm
+ if returnfps:
+ return new_xyz, new_points, grouped_xyz, fps_idx
+ else:
+ return new_xyz, new_points
+
+
+def sample_and_group_all(xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, N, 3]
+ points: input points data, [B, N, D]
+ Return:
+ new_xyz: sampled points position data, [B, 1, 3]
+ new_points: sampled points data, [B, 1, N, 3+D]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ new_xyz = torch.zeros(B, 1, C).to(device)
+ grouped_xyz = xyz.view(B, 1, N, C)
+ if points is not None:
+ new_points = torch.cat([grouped_xyz, points.view(B, 1, N, -1)], dim=-1)
+ else:
+ new_points = grouped_xyz
+ return new_xyz, new_points
+
+
+class PointNetSetAbstraction(nn.Module):
+ def __init__(self, npoint, radius, nsample, in_channel, mlp, group_all):
+ super(PointNetSetAbstraction, self).__init__()
+ self.npoint = npoint
+ self.radius = radius
+ self.nsample = nsample
+ self.mlp_convs = nn.ModuleList()
+ self.mlp_bns = nn.ModuleList()
+ last_channel = in_channel
+ for out_channel in mlp:
+ self.mlp_convs.append(nn.Conv2d(last_channel, out_channel, 1))
+ self.mlp_bns.append(nn.BatchNorm2d(out_channel))
+ last_channel = out_channel
+ self.group_all = group_all
+
+ def forward(self, xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, C, N]
+ points: input points data, [B, D, N]
+ Return:
+ new_xyz: sampled points position data, [B, C, S]
+ new_points_concat: sample points feature data, [B, D', S]
+ """
+ xyz = xyz.permute(0, 2, 1)
+ if points is not None:
+ points = points.permute(0, 2, 1)
+
+ if self.group_all:
+ new_xyz, new_points = sample_and_group_all(xyz, points)
+ else:
+ new_xyz, new_points = sample_and_group(self.npoint, self.radius, self.nsample, xyz, points)
+ # new_xyz: sampled points position data, [B, npoint, C]
+ # new_points: sampled points data, [B, npoint, nsample, C+D]
+ new_points = new_points.permute(0, 3, 2, 1) # [B, C+D, nsample,npoint]
+ for i, conv in enumerate(self.mlp_convs):
+ bn = self.mlp_bns[i]
+ new_points = F.relu(bn(conv(new_points)), inplace=True)
+
+ new_points = torch.max(new_points, 2)[0]
+ new_xyz = new_xyz.permute(0, 2, 1)
+ return new_xyz, new_points
+
+
+class PointNetSetAbstractionMsg(nn.Module):
+ def __init__(self, npoint, radius_list, nsample_list, in_channel, mlp_list):
+ super(PointNetSetAbstractionMsg, self).__init__()
+ self.npoint = npoint
+ self.radius_list = radius_list
+ self.nsample_list = nsample_list
+ self.conv_blocks = nn.ModuleList()
+ self.bn_blocks = nn.ModuleList()
+ for i in range(len(mlp_list)):
+ convs = nn.ModuleList()
+ bns = nn.ModuleList()
+ last_channel = in_channel + 3
+ for out_channel in mlp_list[i]:
+ convs.append(nn.Conv2d(last_channel, out_channel, 1))
+ bns.append(nn.BatchNorm2d(out_channel))
+ last_channel = out_channel
+ self.conv_blocks.append(convs)
+ self.bn_blocks.append(bns)
+
+ def forward(self, xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, C, N]
+ points: input points data, [B, D, N]
+ Return:
+ new_xyz: sampled points position data, [B, C, S]
+ new_points_concat: sample points feature data, [B, D', S]
+ """
+ xyz = xyz.permute(0, 2, 1)
+ if points is not None:
+ points = points.permute(0, 2, 1)
+
+ B, N, C = xyz.shape
+ S = self.npoint
+ new_xyz = index_points(xyz, farthest_point_sample(xyz, S))
+ new_points_list = []
+ for i, radius in enumerate(self.radius_list):
+ K = self.nsample_list[i]
+ group_idx = query_ball_point(radius, K, xyz, new_xyz)
+ grouped_xyz = index_points(xyz, group_idx)
+ grouped_xyz -= new_xyz.view(B, S, 1, C)
+ if points is not None:
+ grouped_points = index_points(points, group_idx)
+ grouped_points = torch.cat([grouped_points, grouped_xyz], dim=-1)
+ else:
+ grouped_points = grouped_xyz
+
+ grouped_points = grouped_points.permute(0, 3, 2, 1) # [B, D, K, S]
+ for j in range(len(self.conv_blocks[i])):
+ conv = self.conv_blocks[i][j]
+ bn = self.bn_blocks[i][j]
+ grouped_points = F.relu(bn(conv(grouped_points)), inplace=True)
+ new_points = torch.max(grouped_points, 2)[0] # [B, D', S]
+ new_points_list.append(new_points)
+
+ new_xyz = new_xyz.permute(0, 2, 1)
+ new_points_concat = torch.cat(new_points_list, dim=1)
+ return new_xyz, new_points_concat
+
+
+class PointNetFeaturePropagation(nn.Module):
+ def __init__(self, in_channel, mlp):
+ super(PointNetFeaturePropagation, self).__init__()
+ self.mlp_convs = nn.ModuleList()
+ self.mlp_bns = nn.ModuleList()
+ last_channel = in_channel
+ for out_channel in mlp:
+ self.mlp_convs.append(nn.Conv1d(last_channel, out_channel, 1))
+ self.mlp_bns.append(nn.BatchNorm1d(out_channel))
+ last_channel = out_channel
+
+ def forward(self, xyz1, xyz2, points1, points2):
+ """
+ Input:
+ xyz1: input points position data, [B, C, N]
+ xyz2: sampled input points position data, [B, C, S]
+ points1: input points data, [B, D, N]
+ points2: input points data, [B, D, S]
+ Return:
+ new_points: upsampled points data, [B, D', N]
+ """
+ xyz1 = xyz1.permute(0, 2, 1)
+ xyz2 = xyz2.permute(0, 2, 1)
+
+ points2 = points2.permute(0, 2, 1)
+ B, N, C = xyz1.shape
+ _, S, _ = xyz2.shape
+
+ if S == 1:
+ interpolated_points = points2.repeat(1, N, 1)
+ else:
+ dists = square_distance(xyz1, xyz2)
+ dists, idx = dists.sort(dim=-1)
+ dists, idx = dists[:, :, :3], idx[:, :, :3] # [B, N, 3]
+
+ dist_recip = 1.0 / (dists + 1e-8)
+ norm = torch.sum(dist_recip, dim=2, keepdim=True)
+ weight = dist_recip / norm
+ interpolated_points = torch.sum(index_points(points2, idx) * weight.view(B, N, 3, 1), dim=2)
+
+ if points1 is not None:
+ points1 = points1.permute(0, 2, 1)
+ new_points = torch.cat([points1, interpolated_points], dim=-1)
+ else:
+ new_points = interpolated_points
+
+ new_points = new_points.permute(0, 2, 1)
+ for i, conv in enumerate(self.mlp_convs):
+ bn = self.mlp_bns[i]
+ new_points = F.relu(bn(conv(new_points)), inplace=True)
+ return new_points
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet_sem_seg/pointnet_sem_seg.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet_sem_seg/pointnet_sem_seg.py
new file mode 100644
index 0000000000..aa480daee4
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/log/sem_seg/pointnet_sem_seg/pointnet_sem_seg.py
@@ -0,0 +1,50 @@
+import torch
+import torch.nn as nn
+import torch.nn.parallel
+import torch.utils.data
+import torch.nn.functional as F
+from pointnet_utils import PointNetEncoder, feature_transform_reguliarzer
+
+
+class get_model(nn.Module):
+ def __init__(self, num_class):
+ super(get_model, self).__init__()
+ self.k = num_class
+ self.feat = PointNetEncoder(global_feat=False, feature_transform=True, channel=9)
+ self.conv1 = torch.nn.Conv1d(1088, 512, 1)
+ self.conv2 = torch.nn.Conv1d(512, 256, 1)
+ self.conv3 = torch.nn.Conv1d(256, 128, 1)
+ self.conv4 = torch.nn.Conv1d(128, self.k, 1)
+ self.bn1 = nn.BatchNorm1d(512)
+ self.bn2 = nn.BatchNorm1d(256)
+ self.bn3 = nn.BatchNorm1d(128)
+
+ def forward(self, x):
+ batchsize = x.size()[0]
+ n_pts = x.size()[2]
+ x, trans, trans_feat = self.feat(x)
+ x = F.relu(self.bn1(self.conv1(x)), inplace=True)
+ x = F.relu(self.bn2(self.conv2(x)), inplace=True)
+ x = F.relu(self.bn3(self.conv3(x)), inplace=True)
+ x = self.conv4(x)
+ x = x.transpose(2,1).contiguous()
+ x = F.log_softmax(x.view(-1,self.k), dim=-1)
+ x = x.view(batchsize, n_pts, self.k)
+ return x, trans_feat
+
+class get_loss(torch.nn.Module):
+ def __init__(self, mat_diff_loss_scale=0.001):
+ super(get_loss, self).__init__()
+ self.mat_diff_loss_scale = mat_diff_loss_scale
+
+ def forward(self, pred, target, trans_feat, weight):
+ loss = F.nll_loss(pred, target, weight = weight)
+ mat_diff_loss = feature_transform_reguliarzer(trans_feat)
+ total_loss = loss + mat_diff_loss * self.mat_diff_loss_scale
+ return total_loss
+
+
+if __name__ == '__main__':
+ model = get_model(13)
+ xyz = torch.rand(12, 3, 2048)
+ (model(xyz))
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_cls_msg.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_cls_msg.py
new file mode 100644
index 0000000000..10593c38c7
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_cls_msg.py
@@ -0,0 +1,51 @@
+import torch.nn as nn
+import torch.nn.functional as F
+from pointnet2_utils import PointNetSetAbstractionMsg, PointNetSetAbstraction
+
+
+class get_model(nn.Module):
+ def __init__(self,num_class,normal_channel=True):
+ super(get_model, self).__init__()
+ in_channel = 3 if normal_channel else 0
+ self.normal_channel = normal_channel
+ self.sa1 = PointNetSetAbstractionMsg(512, [0.1, 0.2, 0.4], [16, 32, 128], in_channel,[[32, 32, 64], [64, 64, 128], [64, 96, 128]])
+ self.sa2 = PointNetSetAbstractionMsg(128, [0.2, 0.4, 0.8], [32, 64, 128], 320,[[64, 64, 128], [128, 128, 256], [128, 128, 256]])
+ self.sa3 = PointNetSetAbstraction(None, None, None, 640 + 3, [256, 512, 1024], True)
+ self.fc1 = nn.Linear(1024, 512)
+ self.bn1 = nn.BatchNorm1d(512)
+ self.drop1 = nn.Dropout(0.4)
+ self.fc2 = nn.Linear(512, 256)
+ self.bn2 = nn.BatchNorm1d(256)
+ self.drop2 = nn.Dropout(0.5)
+ self.fc3 = nn.Linear(256, num_class)
+
+ def forward(self, xyz):
+ B, _, _ = xyz.shape
+ if self.normal_channel:
+ norm = xyz[:, 3:, :]
+ xyz = xyz[:, :3, :]
+ else:
+ norm = None
+ l1_xyz, l1_points = self.sa1(xyz, norm)
+ l2_xyz, l2_points = self.sa2(l1_xyz, l1_points)
+ l3_xyz, l3_points = self.sa3(l2_xyz, l2_points)
+ x = l3_points.view(B, 1024)
+ x = self.drop1(F.relu(self.bn1(self.fc1(x))))
+ x = self.drop2(F.relu(self.bn2(self.fc2(x))))
+ x = self.fc3(x)
+ x = F.log_softmax(x, -1)
+
+
+ return x,l3_points
+
+
+class get_loss(nn.Module):
+ def __init__(self):
+ super(get_loss, self).__init__()
+
+ def forward(self, pred, target, trans_feat):
+ total_loss = F.nll_loss(pred, target)
+
+ return total_loss
+
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_cls_ssg.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_cls_ssg.py
new file mode 100644
index 0000000000..4e14ba6a5e
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_cls_ssg.py
@@ -0,0 +1,50 @@
+import torch.nn as nn
+import torch.nn.functional as F
+from pointnet2_utils import PointNetSetAbstraction
+
+
+class get_model(nn.Module):
+ def __init__(self,num_class,normal_channel=True):
+ super(get_model, self).__init__()
+ in_channel = 6 if normal_channel else 3
+ self.normal_channel = normal_channel
+ self.sa1 = PointNetSetAbstraction(npoint=512, radius=0.2, nsample=32, in_channel=in_channel, mlp=[64, 64, 128], group_all=False)
+ self.sa2 = PointNetSetAbstraction(npoint=128, radius=0.4, nsample=64, in_channel=128 + 3, mlp=[128, 128, 256], group_all=False)
+ self.sa3 = PointNetSetAbstraction(npoint=None, radius=None, nsample=None, in_channel=256 + 3, mlp=[256, 512, 1024], group_all=True)
+ self.fc1 = nn.Linear(1024, 512)
+ self.bn1 = nn.BatchNorm1d(512)
+ self.drop1 = nn.Dropout(0.4)
+ self.fc2 = nn.Linear(512, 256)
+ self.bn2 = nn.BatchNorm1d(256)
+ self.drop2 = nn.Dropout(0.4)
+ self.fc3 = nn.Linear(256, num_class)
+
+ def forward(self, xyz):
+ B, _, _ = xyz.shape
+ if self.normal_channel:
+ norm = xyz[:, 3:, :]
+ xyz = xyz[:, :3, :]
+ else:
+ norm = None
+ l1_xyz, l1_points = self.sa1(xyz, norm)
+ l2_xyz, l2_points = self.sa2(l1_xyz, l1_points)
+ l3_xyz, l3_points = self.sa3(l2_xyz, l2_points)
+ x = l3_points.view(B, 1024)
+ x = self.drop1(F.relu(self.bn1(self.fc1(x))))
+ x = self.drop2(F.relu(self.bn2(self.fc2(x))))
+ x = self.fc3(x)
+ x = F.log_softmax(x, -1)
+
+
+ return x, l3_points
+
+
+
+class get_loss(nn.Module):
+ def __init__(self):
+ super(get_loss, self).__init__()
+
+ def forward(self, pred, target, trans_feat):
+ total_loss = F.nll_loss(pred, target)
+
+ return total_loss
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_part_seg_msg.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_part_seg_msg.py
new file mode 100644
index 0000000000..f8018f6404
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_part_seg_msg.py
@@ -0,0 +1,59 @@
+import torch.nn as nn
+import torch
+import torch.nn.functional as F
+from models.pointnet2_utils import PointNetSetAbstractionMsg,PointNetSetAbstraction,PointNetFeaturePropagation
+
+
+class get_model(nn.Module):
+ def __init__(self, num_classes, normal_channel=False):
+ super(get_model, self).__init__()
+ if normal_channel:
+ additional_channel = 3
+ else:
+ additional_channel = 0
+ self.normal_channel = normal_channel
+ self.sa1 = PointNetSetAbstractionMsg(512, [0.1, 0.2, 0.4], [32, 64, 128], 3+additional_channel, [[32, 32, 64], [64, 64, 128], [64, 96, 128]])
+ self.sa2 = PointNetSetAbstractionMsg(128, [0.4,0.8], [64, 128], 128+128+64, [[128, 128, 256], [128, 196, 256]])
+ self.sa3 = PointNetSetAbstraction(npoint=None, radius=None, nsample=None, in_channel=512 + 3, mlp=[256, 512, 1024], group_all=True)
+ self.fp3 = PointNetFeaturePropagation(in_channel=1536, mlp=[256, 256])
+ self.fp2 = PointNetFeaturePropagation(in_channel=576, mlp=[256, 128])
+ self.fp1 = PointNetFeaturePropagation(in_channel=150+additional_channel, mlp=[128, 128])
+ self.conv1 = nn.Conv1d(128, 128, 1)
+ self.bn1 = nn.BatchNorm1d(128)
+ self.drop1 = nn.Dropout(0.5)
+ self.conv2 = nn.Conv1d(128, num_classes, 1)
+
+ def forward(self, xyz, cls_label):
+ # Set Abstraction layers
+ B,C,N = xyz.shape
+ if self.normal_channel:
+ l0_points = xyz
+ l0_xyz = xyz[:,:3,:]
+ else:
+ l0_points = xyz
+ l0_xyz = xyz
+ l1_xyz, l1_points = self.sa1(l0_xyz, l0_points)
+ l2_xyz, l2_points = self.sa2(l1_xyz, l1_points)
+ l3_xyz, l3_points = self.sa3(l2_xyz, l2_points)
+ # Feature Propagation layers
+ l2_points = self.fp3(l2_xyz, l3_xyz, l2_points, l3_points)
+ l1_points = self.fp2(l1_xyz, l2_xyz, l1_points, l2_points)
+ cls_label_one_hot = cls_label.view(B,16,1).repeat(1,1,N)
+ l0_points = self.fp1(l0_xyz, l1_xyz, torch.cat([cls_label_one_hot,l0_xyz,l0_points],1), l1_points)
+ # FC layers
+ feat = F.relu(self.bn1(self.conv1(l0_points)))
+ x = self.drop1(feat)
+ x = self.conv2(x)
+ x = F.log_softmax(x, dim=1)
+ x = x.permute(0, 2, 1)
+ return x, l3_points
+
+
+class get_loss(nn.Module):
+ def __init__(self):
+ super(get_loss, self).__init__()
+
+ def forward(self, pred, target, trans_feat):
+ total_loss = F.nll_loss(pred, target)
+
+ return total_loss
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_part_seg_ssg.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_part_seg_ssg.py
new file mode 100644
index 0000000000..11709e9f2f
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_part_seg_ssg.py
@@ -0,0 +1,59 @@
+import torch.nn as nn
+import torch
+import torch.nn.functional as F
+from models.pointnet2_utils import PointNetSetAbstraction,PointNetFeaturePropagation
+
+
+class get_model(nn.Module):
+ def __init__(self, num_classes, normal_channel=False):
+ super(get_model, self).__init__()
+ if normal_channel:
+ additional_channel = 3
+ else:
+ additional_channel = 0
+ self.normal_channel = normal_channel
+ self.sa1 = PointNetSetAbstraction(npoint=512, radius=0.2, nsample=32, in_channel=6+additional_channel, mlp=[64, 64, 128], group_all=False)
+ self.sa2 = PointNetSetAbstraction(npoint=128, radius=0.4, nsample=64, in_channel=128 + 3, mlp=[128, 128, 256], group_all=False)
+ self.sa3 = PointNetSetAbstraction(npoint=None, radius=None, nsample=None, in_channel=256 + 3, mlp=[256, 512, 1024], group_all=True)
+ self.fp3 = PointNetFeaturePropagation(in_channel=1280, mlp=[256, 256])
+ self.fp2 = PointNetFeaturePropagation(in_channel=384, mlp=[256, 128])
+ self.fp1 = PointNetFeaturePropagation(in_channel=128+16+6+additional_channel, mlp=[128, 128, 128])
+ self.conv1 = nn.Conv1d(128, 128, 1)
+ self.bn1 = nn.BatchNorm1d(128)
+ self.drop1 = nn.Dropout(0.5)
+ self.conv2 = nn.Conv1d(128, num_classes, 1)
+
+ def forward(self, xyz, cls_label):
+ # Set Abstraction layers
+ B,C,N = xyz.shape
+ if self.normal_channel:
+ l0_points = xyz
+ l0_xyz = xyz[:,:3,:]
+ else:
+ l0_points = xyz
+ l0_xyz = xyz
+ l1_xyz, l1_points = self.sa1(l0_xyz, l0_points)
+ l2_xyz, l2_points = self.sa2(l1_xyz, l1_points)
+ l3_xyz, l3_points = self.sa3(l2_xyz, l2_points)
+ # Feature Propagation layers
+ l2_points = self.fp3(l2_xyz, l3_xyz, l2_points, l3_points)
+ l1_points = self.fp2(l1_xyz, l2_xyz, l1_points, l2_points)
+ cls_label_one_hot = cls_label.view(B,16,1).repeat(1,1,N)
+ l0_points = self.fp1(l0_xyz, l1_xyz, torch.cat([cls_label_one_hot,l0_xyz,l0_points],1), l1_points)
+ # FC layers
+ feat = F.relu(self.bn1(self.conv1(l0_points)))
+ x = self.drop1(feat)
+ x = self.conv2(x)
+ x = F.log_softmax(x, dim=1)
+ x = x.permute(0, 2, 1)
+ return x, l3_points
+
+
+class get_loss(nn.Module):
+ def __init__(self):
+ super(get_loss, self).__init__()
+
+ def forward(self, pred, target, trans_feat):
+ total_loss = F.nll_loss(pred, target)
+
+ return total_loss
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_sem_seg.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_sem_seg.py
new file mode 100644
index 0000000000..ef51e96a68
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_sem_seg.py
@@ -0,0 +1,55 @@
+import torch.nn as nn
+import torch.nn.functional as F
+from models.pointnet2_utils import PointNetSetAbstraction,PointNetFeaturePropagation
+
+
+class get_model(nn.Module):
+ def __init__(self, num_classes):
+ super(get_model, self).__init__()
+ self.sa1 = PointNetSetAbstraction(1024, 0.1, 32, 9 + 3, [32, 32, 64], False)
+ self.sa2 = PointNetSetAbstraction(256, 0.2, 32, 64 + 3, [64, 64, 128], False)
+ self.sa3 = PointNetSetAbstraction(64, 0.4, 32, 128 + 3, [128, 128, 256], False)
+ self.sa4 = PointNetSetAbstraction(16, 0.8, 32, 256 + 3, [256, 256, 512], False)
+ self.fp4 = PointNetFeaturePropagation(768, [256, 256])
+ self.fp3 = PointNetFeaturePropagation(384, [256, 256])
+ self.fp2 = PointNetFeaturePropagation(320, [256, 128])
+ self.fp1 = PointNetFeaturePropagation(128, [128, 128, 128])
+ self.conv1 = nn.Conv1d(128, 128, 1)
+ self.bn1 = nn.BatchNorm1d(128)
+ self.drop1 = nn.Dropout(0.5)
+ self.conv2 = nn.Conv1d(128, num_classes, 1)
+
+ def forward(self, xyz):
+ l0_points = xyz
+ l0_xyz = xyz[:,:3,:]
+
+ l1_xyz, l1_points = self.sa1(l0_xyz, l0_points)
+ l2_xyz, l2_points = self.sa2(l1_xyz, l1_points)
+ l3_xyz, l3_points = self.sa3(l2_xyz, l2_points)
+ l4_xyz, l4_points = self.sa4(l3_xyz, l3_points)
+
+ l3_points = self.fp4(l3_xyz, l4_xyz, l3_points, l4_points)
+ l2_points = self.fp3(l2_xyz, l3_xyz, l2_points, l3_points)
+ l1_points = self.fp2(l1_xyz, l2_xyz, l1_points, l2_points)
+ l0_points = self.fp1(l0_xyz, l1_xyz, None, l1_points)
+
+ x = self.drop1(F.relu(self.bn1(self.conv1(l0_points))))
+ x = self.conv2(x)
+ x = F.log_softmax(x, dim=1)
+ x = x.permute(0, 2, 1)
+ return x, l4_points
+
+
+class get_loss(nn.Module):
+ def __init__(self):
+ super(get_loss, self).__init__()
+ def forward(self, pred, target, trans_feat, weight):
+ total_loss = F.nll_loss(pred, target, weight=weight)
+
+ return total_loss
+
+if __name__ == '__main__':
+ import torch
+ model = get_model(13)
+ xyz = torch.rand(6, 9, 2048)
+ (model(xyz))
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_sem_seg_msg.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_sem_seg_msg.py
new file mode 100644
index 0000000000..5fa67159f2
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_sem_seg_msg.py
@@ -0,0 +1,56 @@
+import torch.nn as nn
+import torch.nn.functional as F
+from models.pointnet2_utils import PointNetSetAbstractionMsg,PointNetFeaturePropagation
+
+
+class get_model(nn.Module):
+ def __init__(self, num_classes):
+ super(get_model, self).__init__()
+
+ self.sa1 = PointNetSetAbstractionMsg(1024, [0.05, 0.1], [16, 32], 9, [[16, 16, 32], [32, 32, 64]])
+ self.sa2 = PointNetSetAbstractionMsg(256, [0.1, 0.2], [16, 32], 32+64, [[64, 64, 128], [64, 96, 128]])
+ self.sa3 = PointNetSetAbstractionMsg(64, [0.2, 0.4], [16, 32], 128+128, [[128, 196, 256], [128, 196, 256]])
+ self.sa4 = PointNetSetAbstractionMsg(16, [0.4, 0.8], [16, 32], 256+256, [[256, 256, 512], [256, 384, 512]])
+ self.fp4 = PointNetFeaturePropagation(512+512+256+256, [256, 256])
+ self.fp3 = PointNetFeaturePropagation(128+128+256, [256, 256])
+ self.fp2 = PointNetFeaturePropagation(32+64+256, [256, 128])
+ self.fp1 = PointNetFeaturePropagation(128, [128, 128, 128])
+ self.conv1 = nn.Conv1d(128, 128, 1)
+ self.bn1 = nn.BatchNorm1d(128)
+ self.drop1 = nn.Dropout(0.5)
+ self.conv2 = nn.Conv1d(128, num_classes, 1)
+
+ def forward(self, xyz):
+ l0_points = xyz
+ l0_xyz = xyz[:,:3,:]
+
+ l1_xyz, l1_points = self.sa1(l0_xyz, l0_points)
+ l2_xyz, l2_points = self.sa2(l1_xyz, l1_points)
+ l3_xyz, l3_points = self.sa3(l2_xyz, l2_points)
+ l4_xyz, l4_points = self.sa4(l3_xyz, l3_points)
+
+ l3_points = self.fp4(l3_xyz, l4_xyz, l3_points, l4_points)
+ l2_points = self.fp3(l2_xyz, l3_xyz, l2_points, l3_points)
+ l1_points = self.fp2(l1_xyz, l2_xyz, l1_points, l2_points)
+ l0_points = self.fp1(l0_xyz, l1_xyz, None, l1_points)
+
+ x = self.drop1(F.relu(self.bn1(self.conv1(l0_points))))
+ x = self.conv2(x)
+ x = F.log_softmax(x, dim=1)
+ x = x.permute(0, 2, 1)
+ return x, l4_points
+
+
+class get_loss(nn.Module):
+ def __init__(self):
+ super(get_loss, self).__init__()
+ def forward(self, pred, target, trans_feat, weight):
+ total_loss = F.nll_loss(pred, target, weight=weight)
+
+ return total_loss
+
+if __name__ == '__main__':
+ import torch
+ model = get_model(13)
+ xyz = torch.rand(6, 9, 2048)
+ (model(xyz))
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_utils.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_utils.py
new file mode 100644
index 0000000000..d13986ed62
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet2_utils.py
@@ -0,0 +1,316 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from time import time
+import numpy as np
+
+def timeit(tag, t):
+ print("{}: {}s".format(tag, time() - t))
+ return time()
+
+def pc_normalize(pc):
+ l = pc.shape[0]
+ centroid = np.mean(pc, axis=0)
+ pc = pc - centroid
+ m = np.max(np.sqrt(np.sum(pc**2, axis=1)))
+ pc = pc / m
+ return pc
+
+def square_distance(src, dst):
+ """
+ Calculate Euclid distance between each two points.
+
+ src^T * dst = xn * xm + yn * ym + zn * zmï¼›
+ sum(src^2, dim=-1) = xn*xn + yn*yn + zn*zn;
+ sum(dst^2, dim=-1) = xm*xm + ym*ym + zm*zm;
+ dist = (xn - xm)^2 + (yn - ym)^2 + (zn - zm)^2
+ = sum(src**2,dim=-1)+sum(dst**2,dim=-1)-2*src^T*dst
+
+ Input:
+ src: source points, [B, N, C]
+ dst: target points, [B, M, C]
+ Output:
+ dist: per-point square distance, [B, N, M]
+ """
+ B, N, _ = src.shape
+ _, M, _ = dst.shape
+ dist = -2 * torch.matmul(src, dst.permute(0, 2, 1))
+ dist += torch.sum(src ** 2, -1).view(B, N, 1)
+ dist += torch.sum(dst ** 2, -1).view(B, 1, M)
+ return dist
+
+
+def index_points(points, idx):
+ """
+
+ Input:
+ points: input points data, [B, N, C]
+ idx: sample index data, [B, S]
+ Return:
+ new_points:, indexed points data, [B, S, C]
+ """
+ device = points.device
+ B = points.shape[0]
+ view_shape = list(idx.shape)
+ view_shape[1:] = [1] * (len(view_shape) - 1)
+ repeat_shape = list(idx.shape)
+ repeat_shape[0] = 1
+ batch_indices = torch.arange(B, dtype=torch.long).to(device).view(view_shape).repeat(repeat_shape)
+ new_points = points[batch_indices, idx, :]
+ return new_points
+
+
+def farthest_point_sample(xyz, npoint):
+ """
+ Input:
+ xyz: pointcloud data, [B, N, 3]
+ npoint: number of samples
+ Return:
+ centroids: sampled pointcloud index, [B, npoint]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ centroids = torch.zeros(B, npoint, dtype=torch.long).to(device)
+ distance = torch.ones(B, N).to(device) * 1e10
+ farthest = torch.randint(0, N, (B,), dtype=torch.long).to(device)
+ batch_indices = torch.arange(B, dtype=torch.long).to(device)
+ for i in range(npoint):
+ centroids[:, i] = farthest
+ centroid = xyz[batch_indices, farthest, :].view(B, 1, 3)
+ dist = torch.sum((xyz - centroid) ** 2, -1)
+ mask = dist < distance
+ distance[mask] = dist[mask]
+ farthest = torch.max(distance, -1)[1]
+ return centroids
+
+
+def query_ball_point(radius, nsample, xyz, new_xyz):
+ """
+ Input:
+ radius: local region radius
+ nsample: max sample number in local region
+ xyz: all points, [B, N, 3]
+ new_xyz: query points, [B, S, 3]
+ Return:
+ group_idx: grouped points index, [B, S, nsample]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ _, S, _ = new_xyz.shape
+ group_idx = torch.arange(N, dtype=torch.long).to(device).view(1, 1, N).repeat([B, S, 1])
+ sqrdists = square_distance(new_xyz, xyz)
+ group_idx[sqrdists > radius ** 2] = N
+ group_idx = group_idx.sort(dim=-1)[0][:, :, :nsample]
+ group_first = group_idx[:, :, 0].view(B, S, 1).repeat([1, 1, nsample])
+ mask = group_idx == N
+ group_idx[mask] = group_first[mask]
+ return group_idx
+
+
+def sample_and_group(npoint, radius, nsample, xyz, points, returnfps=False):
+ """
+ Input:
+ npoint:
+ radius:
+ nsample:
+ xyz: input points position data, [B, N, 3]
+ points: input points data, [B, N, D]
+ Return:
+ new_xyz: sampled points position data, [B, npoint, nsample, 3]
+ new_points: sampled points data, [B, npoint, nsample, 3+D]
+ """
+ B, N, C = xyz.shape
+ S = npoint
+ fps_idx = farthest_point_sample(xyz, npoint) # [B, npoint, C]
+ new_xyz = index_points(xyz, fps_idx)
+ idx = query_ball_point(radius, nsample, xyz, new_xyz)
+ grouped_xyz = index_points(xyz, idx) # [B, npoint, nsample, C]
+ grouped_xyz_norm = grouped_xyz - new_xyz.view(B, S, 1, C)
+
+ if points is not None:
+ grouped_points = index_points(points, idx)
+ new_points = torch.cat([grouped_xyz_norm, grouped_points], dim=-1) # [B, npoint, nsample, C+D]
+ else:
+ new_points = grouped_xyz_norm
+ if returnfps:
+ return new_xyz, new_points, grouped_xyz, fps_idx
+ else:
+ return new_xyz, new_points
+
+
+def sample_and_group_all(xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, N, 3]
+ points: input points data, [B, N, D]
+ Return:
+ new_xyz: sampled points position data, [B, 1, 3]
+ new_points: sampled points data, [B, 1, N, 3+D]
+ """
+ device = xyz.device
+ B, N, C = xyz.shape
+ new_xyz = torch.zeros(B, 1, C).to(device)
+ grouped_xyz = xyz.view(B, 1, N, C)
+ if points is not None:
+ new_points = torch.cat([grouped_xyz, points.view(B, 1, N, -1)], dim=-1)
+ else:
+ new_points = grouped_xyz
+ return new_xyz, new_points
+
+
+class PointNetSetAbstraction(nn.Module):
+ def __init__(self, npoint, radius, nsample, in_channel, mlp, group_all):
+ super(PointNetSetAbstraction, self).__init__()
+ self.npoint = npoint
+ self.radius = radius
+ self.nsample = nsample
+ self.mlp_convs = nn.ModuleList()
+ self.mlp_bns = nn.ModuleList()
+ last_channel = in_channel
+ for out_channel in mlp:
+ self.mlp_convs.append(nn.Conv2d(last_channel, out_channel, 1))
+ self.mlp_bns.append(nn.BatchNorm2d(out_channel))
+ last_channel = out_channel
+ self.group_all = group_all
+
+ def forward(self, xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, C, N]
+ points: input points data, [B, D, N]
+ Return:
+ new_xyz: sampled points position data, [B, C, S]
+ new_points_concat: sample points feature data, [B, D', S]
+ """
+ xyz = xyz.permute(0, 2, 1)
+ if points is not None:
+ points = points.permute(0, 2, 1)
+
+ if self.group_all:
+ new_xyz, new_points = sample_and_group_all(xyz, points)
+ else:
+ new_xyz, new_points = sample_and_group(self.npoint, self.radius, self.nsample, xyz, points)
+ # new_xyz: sampled points position data, [B, npoint, C]
+ # new_points: sampled points data, [B, npoint, nsample, C+D]
+ new_points = new_points.permute(0, 3, 2, 1) # [B, C+D, nsample,npoint]
+ for i, conv in enumerate(self.mlp_convs):
+ bn = self.mlp_bns[i]
+ new_points = F.relu(bn(conv(new_points)))
+
+ new_points = torch.max(new_points, 2)[0]
+ new_xyz = new_xyz.permute(0, 2, 1)
+ return new_xyz, new_points
+
+
+class PointNetSetAbstractionMsg(nn.Module):
+ def __init__(self, npoint, radius_list, nsample_list, in_channel, mlp_list):
+ super(PointNetSetAbstractionMsg, self).__init__()
+ self.npoint = npoint
+ self.radius_list = radius_list
+ self.nsample_list = nsample_list
+ self.conv_blocks = nn.ModuleList()
+ self.bn_blocks = nn.ModuleList()
+ for i in range(len(mlp_list)):
+ convs = nn.ModuleList()
+ bns = nn.ModuleList()
+ last_channel = in_channel + 3
+ for out_channel in mlp_list[i]:
+ convs.append(nn.Conv2d(last_channel, out_channel, 1))
+ bns.append(nn.BatchNorm2d(out_channel))
+ last_channel = out_channel
+ self.conv_blocks.append(convs)
+ self.bn_blocks.append(bns)
+
+ def forward(self, xyz, points):
+ """
+ Input:
+ xyz: input points position data, [B, C, N]
+ points: input points data, [B, D, N]
+ Return:
+ new_xyz: sampled points position data, [B, C, S]
+ new_points_concat: sample points feature data, [B, D', S]
+ """
+ xyz = xyz.permute(0, 2, 1)
+ if points is not None:
+ points = points.permute(0, 2, 1)
+
+ B, N, C = xyz.shape
+ S = self.npoint
+ new_xyz = index_points(xyz, farthest_point_sample(xyz, S))
+ new_points_list = []
+ for i, radius in enumerate(self.radius_list):
+ K = self.nsample_list[i]
+ group_idx = query_ball_point(radius, K, xyz, new_xyz)
+ grouped_xyz = index_points(xyz, group_idx)
+ grouped_xyz -= new_xyz.view(B, S, 1, C)
+ if points is not None:
+ grouped_points = index_points(points, group_idx)
+ grouped_points = torch.cat([grouped_points, grouped_xyz], dim=-1)
+ else:
+ grouped_points = grouped_xyz
+
+ grouped_points = grouped_points.permute(0, 3, 2, 1) # [B, D, K, S]
+ for j in range(len(self.conv_blocks[i])):
+ conv = self.conv_blocks[i][j]
+ bn = self.bn_blocks[i][j]
+ grouped_points = F.relu(bn(conv(grouped_points)))
+ new_points = torch.max(grouped_points, 2)[0] # [B, D', S]
+ new_points_list.append(new_points)
+
+ new_xyz = new_xyz.permute(0, 2, 1)
+ new_points_concat = torch.cat(new_points_list, dim=1)
+ return new_xyz, new_points_concat
+
+
+class PointNetFeaturePropagation(nn.Module):
+ def __init__(self, in_channel, mlp):
+ super(PointNetFeaturePropagation, self).__init__()
+ self.mlp_convs = nn.ModuleList()
+ self.mlp_bns = nn.ModuleList()
+ last_channel = in_channel
+ for out_channel in mlp:
+ self.mlp_convs.append(nn.Conv1d(last_channel, out_channel, 1))
+ self.mlp_bns.append(nn.BatchNorm1d(out_channel))
+ last_channel = out_channel
+
+ def forward(self, xyz1, xyz2, points1, points2):
+ """
+ Input:
+ xyz1: input points position data, [B, C, N]
+ xyz2: sampled input points position data, [B, C, S]
+ points1: input points data, [B, D, N]
+ points2: input points data, [B, D, S]
+ Return:
+ new_points: upsampled points data, [B, D', N]
+ """
+ xyz1 = xyz1.permute(0, 2, 1)
+ xyz2 = xyz2.permute(0, 2, 1)
+
+ points2 = points2.permute(0, 2, 1)
+ B, N, C = xyz1.shape
+ _, S, _ = xyz2.shape
+
+ if S == 1:
+ interpolated_points = points2.repeat(1, N, 1)
+ else:
+ dists = square_distance(xyz1, xyz2)
+ dists, idx = dists.sort(dim=-1)
+ dists, idx = dists[:, :, :3], idx[:, :, :3] # [B, N, 3]
+
+ dist_recip = 1.0 / (dists + 1e-8)
+ norm = torch.sum(dist_recip, dim=2, keepdim=True)
+ weight = dist_recip / norm
+ interpolated_points = torch.sum(index_points(points2, idx) * weight.view(B, N, 3, 1), dim=2)
+
+ if points1 is not None:
+ points1 = points1.permute(0, 2, 1)
+ new_points = torch.cat([points1, interpolated_points], dim=-1)
+ else:
+ new_points = interpolated_points
+
+ new_points = new_points.permute(0, 2, 1)
+ for i, conv in enumerate(self.mlp_convs):
+ bn = self.mlp_bns[i]
+ new_points = F.relu(bn(conv(new_points)))
+ return new_points
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet_cls.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet_cls.py
new file mode 100644
index 0000000000..2e95a69014
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet_cls.py
@@ -0,0 +1,40 @@
+import torch.nn as nn
+import torch.utils.data
+import torch.nn.functional as F
+from pointnet_utils import PointNetEncoder, feature_transform_reguliarzer
+
+class get_model(nn.Module):
+ def __init__(self, k=40, normal_channel=True):
+ super(get_model, self).__init__()
+ if normal_channel:
+ channel = 6
+ else:
+ channel = 3
+ self.feat = PointNetEncoder(global_feat=True, feature_transform=True, channel=channel)
+ self.fc1 = nn.Linear(1024, 512)
+ self.fc2 = nn.Linear(512, 256)
+ self.fc3 = nn.Linear(256, k)
+ self.dropout = nn.Dropout(p=0.4)
+ self.bn1 = nn.BatchNorm1d(512)
+ self.bn2 = nn.BatchNorm1d(256)
+ self.relu = nn.ReLU()
+
+ def forward(self, x):
+ x, trans, trans_feat = self.feat(x)
+ x = F.relu(self.bn1(self.fc1(x)))
+ x = F.relu(self.bn2(self.dropout(self.fc2(x))))
+ x = self.fc3(x)
+ x = F.log_softmax(x, dim=1)
+ return x, trans_feat
+
+class get_loss(torch.nn.Module):
+ def __init__(self, mat_diff_loss_scale=0.001):
+ super(get_loss, self).__init__()
+ self.mat_diff_loss_scale = mat_diff_loss_scale
+
+ def forward(self, pred, target, trans_feat):
+ loss = F.nll_loss(pred, target)
+ mat_diff_loss = feature_transform_reguliarzer(trans_feat)
+
+ total_loss = loss + mat_diff_loss * self.mat_diff_loss_scale
+ return total_loss
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet_part_seg.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet_part_seg.py
new file mode 100644
index 0000000000..d96a83cc56
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet_part_seg.py
@@ -0,0 +1,86 @@
+import torch
+import torch.nn as nn
+import torch.nn.parallel
+import torch.utils.data
+import torch.nn.functional as F
+from pointnet_utils import STN3d, STNkd, feature_transform_reguliarzer
+
+
+class get_model(nn.Module):
+ def __init__(self, part_num=50, normal_channel=True):
+ super(get_model, self).__init__()
+ if normal_channel:
+ channel = 6
+ else:
+ channel = 3
+ self.part_num = part_num
+ self.stn = STN3d(channel)
+ self.conv1 = torch.nn.Conv1d(channel, 64, 1)
+ self.conv2 = torch.nn.Conv1d(64, 128, 1)
+ self.conv3 = torch.nn.Conv1d(128, 128, 1)
+ self.conv4 = torch.nn.Conv1d(128, 512, 1)
+ self.conv5 = torch.nn.Conv1d(512, 2048, 1)
+ self.bn1 = nn.BatchNorm1d(64)
+ self.bn2 = nn.BatchNorm1d(128)
+ self.bn3 = nn.BatchNorm1d(128)
+ self.bn4 = nn.BatchNorm1d(512)
+ self.bn5 = nn.BatchNorm1d(2048)
+ self.fstn = STNkd(k=128)
+ self.convs1 = torch.nn.Conv1d(4944, 256, 1)
+ self.convs2 = torch.nn.Conv1d(256, 256, 1)
+ self.convs3 = torch.nn.Conv1d(256, 128, 1)
+ self.convs4 = torch.nn.Conv1d(128, part_num, 1)
+ self.bns1 = nn.BatchNorm1d(256)
+ self.bns2 = nn.BatchNorm1d(256)
+ self.bns3 = nn.BatchNorm1d(128)
+
+ def forward(self, point_cloud, label):
+ B, D, N = point_cloud.size()
+ trans = self.stn(point_cloud)
+ point_cloud = point_cloud.transpose(2, 1)
+ if D > 3:
+ point_cloud, feature = point_cloud.split(3, dim=2)
+ point_cloud = torch.bmm(point_cloud, trans)
+ if D > 3:
+ point_cloud = torch.cat([point_cloud, feature], dim=2)
+
+ point_cloud = point_cloud.transpose(2, 1)
+
+ out1 = F.relu(self.bn1(self.conv1(point_cloud)))
+ out2 = F.relu(self.bn2(self.conv2(out1)))
+ out3 = F.relu(self.bn3(self.conv3(out2)))
+
+ trans_feat = self.fstn(out3)
+ x = out3.transpose(2, 1)
+ net_transformed = torch.bmm(x, trans_feat)
+ net_transformed = net_transformed.transpose(2, 1)
+
+ out4 = F.relu(self.bn4(self.conv4(net_transformed)))
+ out5 = self.bn5(self.conv5(out4))
+ out_max = torch.max(out5, 2, keepdim=True)[0]
+ out_max = out_max.view(-1, 2048)
+
+ out_max = torch.cat([out_max,label.squeeze(1)],1)
+ expand = out_max.view(-1, 2048+16, 1).repeat(1, 1, N)
+ concat = torch.cat([expand, out1, out2, out3, out4, out5], 1)
+ net = F.relu(self.bns1(self.convs1(concat)))
+ net = F.relu(self.bns2(self.convs2(net)))
+ net = F.relu(self.bns3(self.convs3(net)))
+ net = self.convs4(net)
+ net = net.transpose(2, 1).contiguous()
+ net = F.log_softmax(net.view(-1, self.part_num), dim=-1)
+ net = net.view(B, N, self.part_num) # [B, N, 50]
+
+ return net, trans_feat
+
+
+class get_loss(torch.nn.Module):
+ def __init__(self, mat_diff_loss_scale=0.001):
+ super(get_loss, self).__init__()
+ self.mat_diff_loss_scale = mat_diff_loss_scale
+
+ def forward(self, pred, target, trans_feat):
+ loss = F.nll_loss(pred, target)
+ mat_diff_loss = feature_transform_reguliarzer(trans_feat)
+ total_loss = loss + mat_diff_loss * self.mat_diff_loss_scale
+ return total_loss
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet_sem_seg.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet_sem_seg.py
new file mode 100644
index 0000000000..52adb977fc
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet_sem_seg.py
@@ -0,0 +1,50 @@
+import torch
+import torch.nn as nn
+import torch.nn.parallel
+import torch.utils.data
+import torch.nn.functional as F
+from pointnet_utils import PointNetEncoder, feature_transform_reguliarzer
+
+
+class get_model(nn.Module):
+ def __init__(self, num_class):
+ super(get_model, self).__init__()
+ self.k = num_class
+ self.feat = PointNetEncoder(global_feat=False, feature_transform=True, channel=9)
+ self.conv1 = torch.nn.Conv1d(1088, 512, 1)
+ self.conv2 = torch.nn.Conv1d(512, 256, 1)
+ self.conv3 = torch.nn.Conv1d(256, 128, 1)
+ self.conv4 = torch.nn.Conv1d(128, self.k, 1)
+ self.bn1 = nn.BatchNorm1d(512)
+ self.bn2 = nn.BatchNorm1d(256)
+ self.bn3 = nn.BatchNorm1d(128)
+
+ def forward(self, x):
+ batchsize = x.size()[0]
+ n_pts = x.size()[2]
+ x, trans, trans_feat = self.feat(x)
+ x = F.relu(self.bn1(self.conv1(x)))
+ x = F.relu(self.bn2(self.conv2(x)))
+ x = F.relu(self.bn3(self.conv3(x)))
+ x = self.conv4(x)
+ x = x.transpose(2,1).contiguous()
+ x = F.log_softmax(x.view(-1,self.k), dim=-1)
+ x = x.view(batchsize, n_pts, self.k)
+ return x, trans_feat
+
+class get_loss(torch.nn.Module):
+ def __init__(self, mat_diff_loss_scale=0.001):
+ super(get_loss, self).__init__()
+ self.mat_diff_loss_scale = mat_diff_loss_scale
+
+ def forward(self, pred, target, trans_feat, weight):
+ loss = F.nll_loss(pred, target, weight = weight)
+ mat_diff_loss = feature_transform_reguliarzer(trans_feat)
+ total_loss = loss + mat_diff_loss * self.mat_diff_loss_scale
+ return total_loss
+
+
+if __name__ == '__main__':
+ model = get_model(13)
+ xyz = torch.rand(12, 3, 2048)
+ (model(xyz))
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet_utils.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet_utils.py
new file mode 100644
index 0000000000..1ccb3a53fb
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/models/pointnet_utils.py
@@ -0,0 +1,142 @@
+import torch
+import torch.nn as nn
+import torch.nn.parallel
+import torch.utils.data
+from torch.autograd import Variable
+import numpy as np
+import torch.nn.functional as F
+
+
+class STN3d(nn.Module):
+ def __init__(self, channel):
+ super(STN3d, self).__init__()
+ self.conv1 = torch.nn.Conv1d(channel, 64, 1)
+ self.conv2 = torch.nn.Conv1d(64, 128, 1)
+ self.conv3 = torch.nn.Conv1d(128, 1024, 1)
+ self.fc1 = nn.Linear(1024, 512)
+ self.fc2 = nn.Linear(512, 256)
+ self.fc3 = nn.Linear(256, 9)
+ self.relu = nn.ReLU()
+
+ self.bn1 = nn.BatchNorm1d(64)
+ self.bn2 = nn.BatchNorm1d(128)
+ self.bn3 = nn.BatchNorm1d(1024)
+ self.bn4 = nn.BatchNorm1d(512)
+ self.bn5 = nn.BatchNorm1d(256)
+
+ def forward(self, x):
+ batchsize = x.size()[0]
+ x = F.relu(self.bn1(self.conv1(x)))
+ x = F.relu(self.bn2(self.conv2(x)))
+ x = F.relu(self.bn3(self.conv3(x)))
+ x = torch.max(x, 2, keepdim=True)[0]
+ x = x.view(-1, 1024)
+
+ x = F.relu(self.bn4(self.fc1(x)))
+ x = F.relu(self.bn5(self.fc2(x)))
+ x = self.fc3(x)
+
+ iden = Variable(torch.from_numpy(np.array([1, 0, 0, 0, 1, 0, 0, 0, 1]).astype(np.float32))).view(1, 9).repeat(
+ batchsize, 1)
+ if x.is_cuda:
+ iden = iden.cuda()
+ x = x + iden
+ x = x.view(-1, 3, 3)
+ return x
+
+
+class STNkd(nn.Module):
+ def __init__(self, k=64):
+ super(STNkd, self).__init__()
+ self.conv1 = torch.nn.Conv1d(k, 64, 1)
+ self.conv2 = torch.nn.Conv1d(64, 128, 1)
+ self.conv3 = torch.nn.Conv1d(128, 1024, 1)
+ self.fc1 = nn.Linear(1024, 512)
+ self.fc2 = nn.Linear(512, 256)
+ self.fc3 = nn.Linear(256, k * k)
+ self.relu = nn.ReLU()
+
+ self.bn1 = nn.BatchNorm1d(64)
+ self.bn2 = nn.BatchNorm1d(128)
+ self.bn3 = nn.BatchNorm1d(1024)
+ self.bn4 = nn.BatchNorm1d(512)
+ self.bn5 = nn.BatchNorm1d(256)
+
+ self.k = k
+
+ def forward(self, x):
+ batchsize = x.size()[0]
+ x = F.relu(self.bn1(self.conv1(x)))
+ x = F.relu(self.bn2(self.conv2(x)))
+ x = F.relu(self.bn3(self.conv3(x)))
+ x = torch.max(x, 2, keepdim=True)[0]
+ x = x.view(-1, 1024)
+
+ x = F.relu(self.bn4(self.fc1(x)))
+ x = F.relu(self.bn5(self.fc2(x)))
+ x = self.fc3(x)
+
+ iden = Variable(torch.from_numpy(np.eye(self.k).flatten().astype(np.float32))).view(1, self.k * self.k).repeat(
+ batchsize, 1)
+ if x.is_cuda:
+ iden = iden.cuda()
+ x = x + iden
+ x = x.view(-1, self.k, self.k)
+ return x
+
+
+class PointNetEncoder(nn.Module):
+ def __init__(self, global_feat=True, feature_transform=False, channel=3):
+ super(PointNetEncoder, self).__init__()
+ self.stn = STN3d(channel)
+ self.conv1 = torch.nn.Conv1d(channel, 64, 1)
+ self.conv2 = torch.nn.Conv1d(64, 128, 1)
+ self.conv3 = torch.nn.Conv1d(128, 1024, 1)
+ self.bn1 = nn.BatchNorm1d(64)
+ self.bn2 = nn.BatchNorm1d(128)
+ self.bn3 = nn.BatchNorm1d(1024)
+ self.global_feat = global_feat
+ self.feature_transform = feature_transform
+ if self.feature_transform:
+ self.fstn = STNkd(k=64)
+
+ def forward(self, x):
+ B, D, N = x.size()
+ trans = self.stn(x)
+ x = x.transpose(2, 1)
+ if D > 3:
+ feature = x[:, :, 3:]
+ x = x[:, :, :3]
+ x = torch.bmm(x, trans)
+ if D > 3:
+ x = torch.cat([x, feature], dim=2)
+ x = x.transpose(2, 1)
+ x = F.relu(self.bn1(self.conv1(x)))
+
+ if self.feature_transform:
+ trans_feat = self.fstn(x)
+ x = x.transpose(2, 1)
+ x = torch.bmm(x, trans_feat)
+ x = x.transpose(2, 1)
+ else:
+ trans_feat = None
+
+ pointfeat = x
+ x = F.relu(self.bn2(self.conv2(x)))
+ x = self.bn3(self.conv3(x))
+ x = torch.max(x, 2, keepdim=True)[0]
+ x = x.view(-1, 1024)
+ if self.global_feat:
+ return x, trans, trans_feat
+ else:
+ x = x.view(-1, 1024, 1).repeat(1, 1, N)
+ return torch.cat([x, pointfeat], 1), trans, trans_feat
+
+
+def feature_transform_reguliarzer(trans):
+ d = trans.size()[1]
+ I = torch.eye(d)[None, :, :]
+ if trans.is_cuda:
+ I = I.cuda()
+ loss = torch.mean(torch.norm(torch.bmm(trans, trans.transpose(2, 1)) - I, dim=(1, 2)))
+ return loss
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/provider.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/provider.py
new file mode 100644
index 0000000000..56046916e0
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/provider.py
@@ -0,0 +1,251 @@
+import numpy as np
+
+def normalize_data(batch_data):
+ """ Normalize the batch data, use coordinates of the block centered at origin,
+ Input:
+ BxNxC array
+ Output:
+ BxNxC array
+ """
+ B, N, C = batch_data.shape
+ normal_data = np.zeros((B, N, C))
+ for b in range(B):
+ pc = batch_data[b]
+ centroid = np.mean(pc, axis=0)
+ pc = pc - centroid
+ m = np.max(np.sqrt(np.sum(pc ** 2, axis=1)))
+ pc = pc / m
+ normal_data[b] = pc
+ return normal_data
+
+
+def shuffle_data(data, labels):
+ """ Shuffle data and labels.
+ Input:
+ data: B,N,... numpy array
+ label: B,... numpy array
+ Return:
+ shuffled data, label and shuffle indices
+ """
+ idx = np.arange(len(labels))
+ np.random.shuffle(idx)
+ return data[idx, ...], labels[idx], idx
+
+def shuffle_points(batch_data):
+ """ Shuffle orders of points in each point cloud -- changes FPS behavior.
+ Use the same shuffling idx for the entire batch.
+ Input:
+ BxNxC array
+ Output:
+ BxNxC array
+ """
+ idx = np.arange(batch_data.shape[1])
+ np.random.shuffle(idx)
+ return batch_data[:,idx,:]
+
+def rotate_point_cloud(batch_data):
+ """ Randomly rotate the point clouds to augument the dataset
+ rotation is per shape based along up direction
+ Input:
+ BxNx3 array, original batch of point clouds
+ Return:
+ BxNx3 array, rotated batch of point clouds
+ """
+ rotated_data = np.zeros(batch_data.shape, dtype=np.float32)
+ for k in range(batch_data.shape[0]):
+ rotation_angle = np.random.uniform() * 2 * np.pi
+ cosval = np.cos(rotation_angle)
+ sinval = np.sin(rotation_angle)
+ rotation_matrix = np.array([[cosval, 0, sinval],
+ [0, 1, 0],
+ [-sinval, 0, cosval]])
+ shape_pc = batch_data[k, ...]
+ rotated_data[k, ...] = np.dot(shape_pc.reshape((-1, 3)), rotation_matrix)
+ return rotated_data
+
+def rotate_point_cloud_z(batch_data):
+ """ Randomly rotate the point clouds to augument the dataset
+ rotation is per shape based along up direction
+ Input:
+ BxNx3 array, original batch of point clouds
+ Return:
+ BxNx3 array, rotated batch of point clouds
+ """
+ rotated_data = np.zeros(batch_data.shape, dtype=np.float32)
+ for k in range(batch_data.shape[0]):
+ rotation_angle = np.random.uniform() * 2 * np.pi
+ cosval = np.cos(rotation_angle)
+ sinval = np.sin(rotation_angle)
+ rotation_matrix = np.array([[cosval, sinval, 0],
+ [-sinval, cosval, 0],
+ [0, 0, 1]])
+ shape_pc = batch_data[k, ...]
+ rotated_data[k, ...] = np.dot(shape_pc.reshape((-1, 3)), rotation_matrix)
+ return rotated_data
+
+def rotate_point_cloud_with_normal(batch_xyz_normal):
+ ''' Randomly rotate XYZ, normal point cloud.
+ Input:
+ batch_xyz_normal: B,N,6, first three channels are XYZ, last 3 all normal
+ Output:
+ B,N,6, rotated XYZ, normal point cloud
+ '''
+ for k in range(batch_xyz_normal.shape[0]):
+ rotation_angle = np.random.uniform() * 2 * np.pi
+ cosval = np.cos(rotation_angle)
+ sinval = np.sin(rotation_angle)
+ rotation_matrix = np.array([[cosval, 0, sinval],
+ [0, 1, 0],
+ [-sinval, 0, cosval]])
+ shape_pc = batch_xyz_normal[k,:,0:3]
+ shape_normal = batch_xyz_normal[k,:,3:6]
+ batch_xyz_normal[k,:,0:3] = np.dot(shape_pc.reshape((-1, 3)), rotation_matrix)
+ batch_xyz_normal[k,:,3:6] = np.dot(shape_normal.reshape((-1, 3)), rotation_matrix)
+ return batch_xyz_normal
+
+def rotate_perturbation_point_cloud_with_normal(batch_data, angle_sigma=0.06, angle_clip=0.18):
+ """ Randomly perturb the point clouds by small rotations
+ Input:
+ BxNx6 array, original batch of point clouds and point normals
+ Return:
+ BxNx3 array, rotated batch of point clouds
+ """
+ rotated_data = np.zeros(batch_data.shape, dtype=np.float32)
+ for k in range(batch_data.shape[0]):
+ angles = np.clip(angle_sigma*np.random.randn(3), -angle_clip, angle_clip)
+ Rx = np.array([[1,0,0],
+ [0,np.cos(angles[0]),-np.sin(angles[0])],
+ [0,np.sin(angles[0]),np.cos(angles[0])]])
+ Ry = np.array([[np.cos(angles[1]),0,np.sin(angles[1])],
+ [0,1,0],
+ [-np.sin(angles[1]),0,np.cos(angles[1])]])
+ Rz = np.array([[np.cos(angles[2]),-np.sin(angles[2]),0],
+ [np.sin(angles[2]),np.cos(angles[2]),0],
+ [0,0,1]])
+ R = np.dot(Rz, np.dot(Ry,Rx))
+ shape_pc = batch_data[k,:,0:3]
+ shape_normal = batch_data[k,:,3:6]
+ rotated_data[k,:,0:3] = np.dot(shape_pc.reshape((-1, 3)), R)
+ rotated_data[k,:,3:6] = np.dot(shape_normal.reshape((-1, 3)), R)
+ return rotated_data
+
+
+def rotate_point_cloud_by_angle(batch_data, rotation_angle):
+ """ Rotate the point cloud along up direction with certain angle.
+ Input:
+ BxNx3 array, original batch of point clouds
+ Return:
+ BxNx3 array, rotated batch of point clouds
+ """
+ rotated_data = np.zeros(batch_data.shape, dtype=np.float32)
+ for k in range(batch_data.shape[0]):
+ #rotation_angle = np.random.uniform() * 2 * np.pi
+ cosval = np.cos(rotation_angle)
+ sinval = np.sin(rotation_angle)
+ rotation_matrix = np.array([[cosval, 0, sinval],
+ [0, 1, 0],
+ [-sinval, 0, cosval]])
+ shape_pc = batch_data[k,:,0:3]
+ rotated_data[k,:,0:3] = np.dot(shape_pc.reshape((-1, 3)), rotation_matrix)
+ return rotated_data
+
+def rotate_point_cloud_by_angle_with_normal(batch_data, rotation_angle):
+ """ Rotate the point cloud along up direction with certain angle.
+ Input:
+ BxNx6 array, original batch of point clouds with normal
+ scalar, angle of rotation
+ Return:
+ BxNx6 array, rotated batch of point clouds iwth normal
+ """
+ rotated_data = np.zeros(batch_data.shape, dtype=np.float32)
+ for k in range(batch_data.shape[0]):
+ #rotation_angle = np.random.uniform() * 2 * np.pi
+ cosval = np.cos(rotation_angle)
+ sinval = np.sin(rotation_angle)
+ rotation_matrix = np.array([[cosval, 0, sinval],
+ [0, 1, 0],
+ [-sinval, 0, cosval]])
+ shape_pc = batch_data[k,:,0:3]
+ shape_normal = batch_data[k,:,3:6]
+ rotated_data[k,:,0:3] = np.dot(shape_pc.reshape((-1, 3)), rotation_matrix)
+ rotated_data[k,:,3:6] = np.dot(shape_normal.reshape((-1,3)), rotation_matrix)
+ return rotated_data
+
+
+
+def rotate_perturbation_point_cloud(batch_data, angle_sigma=0.06, angle_clip=0.18):
+ """ Randomly perturb the point clouds by small rotations
+ Input:
+ BxNx3 array, original batch of point clouds
+ Return:
+ BxNx3 array, rotated batch of point clouds
+ """
+ rotated_data = np.zeros(batch_data.shape, dtype=np.float32)
+ for k in range(batch_data.shape[0]):
+ angles = np.clip(angle_sigma*np.random.randn(3), -angle_clip, angle_clip)
+ Rx = np.array([[1,0,0],
+ [0,np.cos(angles[0]),-np.sin(angles[0])],
+ [0,np.sin(angles[0]),np.cos(angles[0])]])
+ Ry = np.array([[np.cos(angles[1]),0,np.sin(angles[1])],
+ [0,1,0],
+ [-np.sin(angles[1]),0,np.cos(angles[1])]])
+ Rz = np.array([[np.cos(angles[2]),-np.sin(angles[2]),0],
+ [np.sin(angles[2]),np.cos(angles[2]),0],
+ [0,0,1]])
+ R = np.dot(Rz, np.dot(Ry,Rx))
+ shape_pc = batch_data[k, ...]
+ rotated_data[k, ...] = np.dot(shape_pc.reshape((-1, 3)), R)
+ return rotated_data
+
+
+def jitter_point_cloud(batch_data, sigma=0.01, clip=0.05):
+ """ Randomly jitter points. jittering is per point.
+ Input:
+ BxNx3 array, original batch of point clouds
+ Return:
+ BxNx3 array, jittered batch of point clouds
+ """
+ B, N, C = batch_data.shape
+ assert(clip > 0)
+ jittered_data = np.clip(sigma * np.random.randn(B, N, C), -1*clip, clip)
+ jittered_data += batch_data
+ return jittered_data
+
+def shift_point_cloud(batch_data, shift_range=0.1):
+ """ Randomly shift point cloud. Shift is per point cloud.
+ Input:
+ BxNx3 array, original batch of point clouds
+ Return:
+ BxNx3 array, shifted batch of point clouds
+ """
+ B, N, C = batch_data.shape
+ shifts = np.random.uniform(-shift_range, shift_range, (B,3))
+ for batch_index in range(B):
+ batch_data[batch_index,:,:] += shifts[batch_index,:]
+ return batch_data
+
+
+def random_scale_point_cloud(batch_data, scale_low=0.8, scale_high=1.25):
+ """ Randomly scale the point cloud. Scale is per point cloud.
+ Input:
+ BxNx3 array, original batch of point clouds
+ Return:
+ BxNx3 array, scaled batch of point clouds
+ """
+ B, N, C = batch_data.shape
+ scales = np.random.uniform(scale_low, scale_high, B)
+ for batch_index in range(B):
+ batch_data[batch_index,:,:] *= scales[batch_index]
+ return batch_data
+
+def random_point_dropout(batch_pc, max_dropout_ratio=0.875):
+ ''' batch_pc: BxNx3 '''
+ for b in range(batch_pc.shape[0]):
+ dropout_ratio = np.random.random()*max_dropout_ratio # 0~0.875
+ drop_idx = np.where(np.random.random((batch_pc.shape[1]))<=dropout_ratio)[0]
+ if len(drop_idx)>0:
+ batch_pc[b,drop_idx,:] = batch_pc[b,0,:] # set to the first point
+ return batch_pc
+
+
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/test_classification.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/test_classification.py
new file mode 100644
index 0000000000..79ac09723b
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/test_classification.py
@@ -0,0 +1,115 @@
+"""
+Author: Benny
+Date: Nov 2019
+"""
+from data_utils.ModelNetDataLoader import ModelNetDataLoader
+import argparse
+import numpy as np
+import os
+import torch
+import logging
+from tqdm import tqdm
+import sys
+import importlib
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+ROOT_DIR = BASE_DIR
+sys.path.append(os.path.join(ROOT_DIR, 'models'))
+
+
+def parse_args():
+ '''PARAMETERS'''
+ parser = argparse.ArgumentParser('Testing')
+ parser.add_argument('--use_cpu', action='store_true', default=False, help='use cpu mode')
+ parser.add_argument('--gpu', type=str, default='0', help='specify gpu device')
+ parser.add_argument('--batch_size', type=int, default=24, help='batch size in training')
+ parser.add_argument('--num_category', default=40, type=int, choices=[10, 40], help='training on ModelNet10/40')
+ parser.add_argument('--num_point', type=int, default=1024, help='Point Number')
+ parser.add_argument('--log_dir', type=str, required=True, help='Experiment root')
+ parser.add_argument('--use_normals', action='store_true', default=False, help='use normals')
+ parser.add_argument('--use_uniform_sample', action='store_true', default=False, help='use uniform sampiling')
+ parser.add_argument('--num_votes', type=int, default=3, help='Aggregate classification scores with voting')
+ return parser.parse_args()
+
+
+def test(model, loader, num_class=40, vote_num=1):
+ mean_correct = []
+ classifier = model.eval()
+ class_acc = np.zeros((num_class, 3))
+
+ for j, (points, target) in tqdm(enumerate(loader), total=len(loader)):
+ if not args.use_cpu:
+ points, target = points.cuda(), target.cuda()
+
+ points = points.transpose(2, 1)
+ vote_pool = torch.zeros(target.size()[0], num_class).cuda()
+
+ for _ in range(vote_num):
+ pred, _ = classifier(points)
+ vote_pool += pred
+ pred = vote_pool / vote_num
+ pred_choice = pred.data.max(1)[1]
+
+ for cat in np.unique(target.cpu()):
+ classacc = pred_choice[target == cat].eq(target[target == cat].long().data).cpu().sum()
+ class_acc[cat, 0] += classacc.item() / float(points[target == cat].size()[0])
+ class_acc[cat, 1] += 1
+ correct = pred_choice.eq(target.long().data).cpu().sum()
+ mean_correct.append(correct.item() / float(points.size()[0]))
+
+ class_acc[:, 2] = class_acc[:, 0] / class_acc[:, 1]
+ class_acc = np.mean(class_acc[:, 2])
+ instance_acc = np.mean(mean_correct)
+ return instance_acc, class_acc
+
+
+def main(args):
+ def log_string(str):
+ logger.info(str)
+ print(str)
+
+ '''HYPER PARAMETER'''
+ os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu
+
+ '''CREATE DIR'''
+ experiment_dir = 'log/classification/' + args.log_dir
+
+ '''LOG'''
+ args = parse_args()
+ logger = logging.getLogger("Model")
+ logger.setLevel(logging.INFO)
+ formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+ file_handler = logging.FileHandler('%s/eval.txt' % experiment_dir)
+ file_handler.setLevel(logging.INFO)
+ file_handler.setFormatter(formatter)
+ logger.addHandler(file_handler)
+ log_string('PARAMETER ...')
+ log_string(args)
+
+ '''DATA LOADING'''
+ log_string('Load dataset ...')
+ data_path = 'data/modelnet40_normal_resampled/'
+
+ test_dataset = ModelNetDataLoader(root=data_path, args=args, split='test', process_data=False)
+ testDataLoader = torch.utils.data.DataLoader(test_dataset, batch_size=args.batch_size, shuffle=False, num_workers=10)
+
+ '''MODEL LOADING'''
+ num_class = args.num_category
+ model_name = os.listdir(experiment_dir + '/logs')[0].split('.')[0]
+ model = importlib.import_module(model_name)
+
+ classifier = model.get_model(num_class, normal_channel=args.use_normals)
+ if not args.use_cpu:
+ classifier = classifier.cuda()
+
+ checkpoint = torch.load(str(experiment_dir) + '/checkpoints/best_model.pth')
+ classifier.load_state_dict(checkpoint['model_state_dict'])
+
+ with torch.no_grad():
+ instance_acc, class_acc = test(classifier.eval(), testDataLoader, vote_num=args.num_votes, num_class=num_class)
+ log_string('Test Instance Accuracy: %f, Class Accuracy: %f' % (instance_acc, class_acc))
+
+
+if __name__ == '__main__':
+ args = parse_args()
+ main(args)
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/test_partseg.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/test_partseg.py
new file mode 100644
index 0000000000..687e1deabe
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/test_partseg.py
@@ -0,0 +1,167 @@
+"""
+Author: Benny
+Date: Nov 2019
+"""
+import argparse
+import os
+from data_utils.ShapeNetDataLoader import PartNormalDataset
+import torch
+import logging
+import sys
+import importlib
+from tqdm import tqdm
+import numpy as np
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+ROOT_DIR = BASE_DIR
+sys.path.append(os.path.join(ROOT_DIR, 'models'))
+
+seg_classes = {'Earphone': [16, 17, 18], 'Motorbike': [30, 31, 32, 33, 34, 35], 'Rocket': [41, 42, 43],
+ 'Car': [8, 9, 10, 11], 'Laptop': [28, 29], 'Cap': [6, 7], 'Skateboard': [44, 45, 46], 'Mug': [36, 37],
+ 'Guitar': [19, 20, 21], 'Bag': [4, 5], 'Lamp': [24, 25, 26, 27], 'Table': [47, 48, 49],
+ 'Airplane': [0, 1, 2, 3], 'Pistol': [38, 39, 40], 'Chair': [12, 13, 14, 15], 'Knife': [22, 23]}
+
+seg_label_to_cat = {} # {0:Airplane, 1:Airplane, ...49:Table}
+for cat in seg_classes.keys():
+ for label in seg_classes[cat]:
+ seg_label_to_cat[label] = cat
+
+
+def to_categorical(y, num_classes):
+ """ 1-hot encodes a tensor """
+ new_y = torch.eye(num_classes)[y.cpu().data.numpy(),]
+ if (y.is_cuda):
+ return new_y.cuda()
+ return new_y
+
+
+def parse_args():
+ '''PARAMETERS'''
+ parser = argparse.ArgumentParser('PointNet')
+ parser.add_argument('--batch_size', type=int, default=24, help='batch size in testing')
+ parser.add_argument('--gpu', type=str, default='0', help='specify gpu device')
+ parser.add_argument('--num_point', type=int, default=2048, help='point Number')
+ parser.add_argument('--log_dir', type=str, required=True, help='experiment root')
+ parser.add_argument('--normal', action='store_true', default=False, help='use normals')
+ parser.add_argument('--num_votes', type=int, default=3, help='aggregate segmentation scores with voting')
+ return parser.parse_args()
+
+
+def main(args):
+ def log_string(str):
+ logger.info(str)
+ print(str)
+
+ '''HYPER PARAMETER'''
+ os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu
+ experiment_dir = 'log/part_seg/' + args.log_dir
+
+ '''LOG'''
+ args = parse_args()
+ logger = logging.getLogger("Model")
+ logger.setLevel(logging.INFO)
+ formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+ file_handler = logging.FileHandler('%s/eval.txt' % experiment_dir)
+ file_handler.setLevel(logging.INFO)
+ file_handler.setFormatter(formatter)
+ logger.addHandler(file_handler)
+ log_string('PARAMETER ...')
+ log_string(args)
+
+ root = 'data/shapenetcore_partanno_segmentation_benchmark_v0_normal/'
+
+ TEST_DATASET = PartNormalDataset(root=root, npoints=args.num_point, split='test', normal_channel=args.normal)
+ testDataLoader = torch.utils.data.DataLoader(TEST_DATASET, batch_size=args.batch_size, shuffle=False, num_workers=4)
+ log_string("The number of test data is: %d" % len(TEST_DATASET))
+ num_classes = 16
+ num_part = 50
+
+ '''MODEL LOADING'''
+ model_name = os.listdir(experiment_dir + '/logs')[0].split('.')[0]
+ MODEL = importlib.import_module(model_name)
+ classifier = MODEL.get_model(num_part, normal_channel=args.normal).cuda()
+ checkpoint = torch.load(str(experiment_dir) + '/checkpoints/best_model.pth')
+ classifier.load_state_dict(checkpoint['model_state_dict'])
+
+ with torch.no_grad():
+ test_metrics = {}
+ total_correct = 0
+ total_seen = 0
+ total_seen_class = [0 for _ in range(num_part)]
+ total_correct_class = [0 for _ in range(num_part)]
+ shape_ious = {cat: [] for cat in seg_classes.keys()}
+ seg_label_to_cat = {} # {0:Airplane, 1:Airplane, ...49:Table}
+
+ for cat in seg_classes.keys():
+ for label in seg_classes[cat]:
+ seg_label_to_cat[label] = cat
+
+ classifier = classifier.eval()
+ for batch_id, (points, label, target) in tqdm(enumerate(testDataLoader), total=len(testDataLoader),
+ smoothing=0.9):
+ batchsize, num_point, _ = points.size()
+ cur_batch_size, NUM_POINT, _ = points.size()
+ points, label, target = points.float().cuda(), label.long().cuda(), target.long().cuda()
+ points = points.transpose(2, 1)
+ vote_pool = torch.zeros(target.size()[0], target.size()[1], num_part).cuda()
+
+ for _ in range(args.num_votes):
+ seg_pred, _ = classifier(points, to_categorical(label, num_classes))
+ vote_pool += seg_pred
+
+ seg_pred = vote_pool / args.num_votes
+ cur_pred_val = seg_pred.cpu().data.numpy()
+ cur_pred_val_logits = cur_pred_val
+ cur_pred_val = np.zeros((cur_batch_size, NUM_POINT)).astype(np.int32)
+ target = target.cpu().data.numpy()
+
+ for i in range(cur_batch_size):
+ cat = seg_label_to_cat[target[i, 0]]
+ logits = cur_pred_val_logits[i, :, :]
+ cur_pred_val[i, :] = np.argmax(logits[:, seg_classes[cat]], 1) + seg_classes[cat][0]
+
+ correct = np.sum(cur_pred_val == target)
+ total_correct += correct
+ total_seen += (cur_batch_size * NUM_POINT)
+
+ for l in range(num_part):
+ total_seen_class[l] += np.sum(target == l)
+ total_correct_class[l] += (np.sum((cur_pred_val == l) & (target == l)))
+
+ for i in range(cur_batch_size):
+ segp = cur_pred_val[i, :]
+ segl = target[i, :]
+ cat = seg_label_to_cat[segl[0]]
+ part_ious = [0.0 for _ in range(len(seg_classes[cat]))]
+ for l in seg_classes[cat]:
+ if (np.sum(segl == l) == 0) and (
+ np.sum(segp == l) == 0): # part is not present, no prediction as well
+ part_ious[l - seg_classes[cat][0]] = 1.0
+ else:
+ part_ious[l - seg_classes[cat][0]] = np.sum((segl == l) & (segp == l)) / float(
+ np.sum((segl == l) | (segp == l)))
+ shape_ious[cat].append(np.mean(part_ious))
+
+ all_shape_ious = []
+ for cat in shape_ious.keys():
+ for iou in shape_ious[cat]:
+ all_shape_ious.append(iou)
+ shape_ious[cat] = np.mean(shape_ious[cat])
+ mean_shape_ious = np.mean(list(shape_ious.values()))
+ test_metrics['accuracy'] = total_correct / float(total_seen)
+ test_metrics['class_avg_accuracy'] = np.mean(
+ np.array(total_correct_class) / np.array(total_seen_class, dtype=np.float))
+ for cat in sorted(shape_ious.keys()):
+ log_string('eval mIoU of %s %f' % (cat + ' ' * (14 - len(cat)), shape_ious[cat]))
+ test_metrics['class_avg_iou'] = mean_shape_ious
+ test_metrics['inctance_avg_iou'] = np.mean(all_shape_ious)
+
+ log_string('Accuracy is: %.5f' % test_metrics['accuracy'])
+ log_string('Class avg accuracy is: %.5f' % test_metrics['class_avg_accuracy'])
+ log_string('Class avg mIOU is: %.5f' % test_metrics['class_avg_iou'])
+ log_string('Inctance avg mIOU is: %.5f' % test_metrics['inctance_avg_iou'])
+
+
+if __name__ == '__main__':
+ args = parse_args()
+ main(args)
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/test_semseg.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/test_semseg.py
new file mode 100644
index 0000000000..a8d8a688a3
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/test_semseg.py
@@ -0,0 +1,203 @@
+"""
+Author: Benny
+Date: Nov 2019
+"""
+import argparse
+import os
+from data_utils.S3DISDataLoader import ScannetDatasetWholeScene
+from data_utils.indoor3d_util import g_label2color
+import torch
+import logging
+from pathlib import Path
+import sys
+import importlib
+from tqdm import tqdm
+import provider
+import numpy as np
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+ROOT_DIR = BASE_DIR
+sys.path.append(os.path.join(ROOT_DIR, 'models'))
+
+classes = ['ceiling', 'floor', 'wall', 'beam', 'column', 'window', 'door', 'table', 'chair', 'sofa', 'bookcase',
+ 'board', 'clutter']
+class2label = {cls: i for i, cls in enumerate(classes)}
+seg_classes = class2label
+seg_label_to_cat = {}
+for i, cat in enumerate(seg_classes.keys()):
+ seg_label_to_cat[i] = cat
+
+
+def parse_args():
+ '''PARAMETERS'''
+ parser = argparse.ArgumentParser('Model')
+ parser.add_argument('--batch_size', type=int, default=32, help='batch size in testing [default: 32]')
+ parser.add_argument('--gpu', type=str, default='0', help='specify gpu device')
+ parser.add_argument('--num_point', type=int, default=4096, help='point number [default: 4096]')
+ parser.add_argument('--log_dir', type=str, required=True, help='experiment root')
+ parser.add_argument('--visual', action='store_true', default=False, help='visualize result [default: False]')
+ parser.add_argument('--test_area', type=int, default=5, help='area for testing, option: 1-6 [default: 5]')
+ parser.add_argument('--num_votes', type=int, default=3, help='aggregate segmentation scores with voting [default: 5]')
+ return parser.parse_args()
+
+
+def add_vote(vote_label_pool, point_idx, pred_label, weight):
+ B = pred_label.shape[0]
+ N = pred_label.shape[1]
+ for b in range(B):
+ for n in range(N):
+ if weight[b, n] != 0 and not np.isinf(weight[b, n]):
+ vote_label_pool[int(point_idx[b, n]), int(pred_label[b, n])] += 1
+ return vote_label_pool
+
+
+def main(args):
+ def log_string(str):
+ logger.info(str)
+ print(str)
+
+ '''HYPER PARAMETER'''
+ os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu
+ experiment_dir = 'log/sem_seg/' + args.log_dir
+ visual_dir = experiment_dir + '/visual/'
+ visual_dir = Path(visual_dir)
+ visual_dir.mkdir(exist_ok=True)
+
+ '''LOG'''
+ args = parse_args()
+ logger = logging.getLogger("Model")
+ logger.setLevel(logging.INFO)
+ formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+ file_handler = logging.FileHandler('%s/eval.txt' % experiment_dir)
+ file_handler.setLevel(logging.INFO)
+ file_handler.setFormatter(formatter)
+ logger.addHandler(file_handler)
+ log_string('PARAMETER ...')
+ log_string(args)
+
+ NUM_CLASSES = 13
+ BATCH_SIZE = args.batch_size
+ NUM_POINT = args.num_point
+
+ root = 'data/s3dis/stanford_indoor3d/'
+
+ TEST_DATASET_WHOLE_SCENE = ScannetDatasetWholeScene(root, split='test', test_area=args.test_area, block_points=NUM_POINT)
+ log_string("The number of test data is: %d" % len(TEST_DATASET_WHOLE_SCENE))
+
+ '''MODEL LOADING'''
+ model_name = os.listdir(experiment_dir + '/logs')[0].split('.')[0]
+ MODEL = importlib.import_module(model_name)
+ classifier = MODEL.get_model(NUM_CLASSES).cuda()
+ checkpoint = torch.load(str(experiment_dir) + '/checkpoints/best_model.pth')
+ classifier.load_state_dict(checkpoint['model_state_dict'])
+ classifier = classifier.eval()
+
+ with torch.no_grad():
+ scene_id = TEST_DATASET_WHOLE_SCENE.file_list
+ scene_id = [x[:-4] for x in scene_id]
+ num_batches = len(TEST_DATASET_WHOLE_SCENE)
+
+ total_seen_class = [0 for _ in range(NUM_CLASSES)]
+ total_correct_class = [0 for _ in range(NUM_CLASSES)]
+ total_iou_deno_class = [0 for _ in range(NUM_CLASSES)]
+
+ log_string('---- EVALUATION WHOLE SCENE----')
+
+ for batch_idx in range(num_batches):
+ print("Inference [%d/%d] %s ..." % (batch_idx + 1, num_batches, scene_id[batch_idx]))
+ total_seen_class_tmp = [0 for _ in range(NUM_CLASSES)]
+ total_correct_class_tmp = [0 for _ in range(NUM_CLASSES)]
+ total_iou_deno_class_tmp = [0 for _ in range(NUM_CLASSES)]
+ if args.visual:
+ fout = open(os.path.join(visual_dir, scene_id[batch_idx] + '_pred.obj'), 'w')
+ fout_gt = open(os.path.join(visual_dir, scene_id[batch_idx] + '_gt.obj'), 'w')
+
+ whole_scene_data = TEST_DATASET_WHOLE_SCENE.scene_points_list[batch_idx]
+ whole_scene_label = TEST_DATASET_WHOLE_SCENE.semantic_labels_list[batch_idx]
+ vote_label_pool = np.zeros((whole_scene_label.shape[0], NUM_CLASSES))
+ for _ in tqdm(range(args.num_votes), total=args.num_votes):
+ scene_data, scene_label, scene_smpw, scene_point_index = TEST_DATASET_WHOLE_SCENE[batch_idx]
+ num_blocks = scene_data.shape[0]
+ s_batch_num = (num_blocks + BATCH_SIZE - 1) // BATCH_SIZE
+ batch_data = np.zeros((BATCH_SIZE, NUM_POINT, 9))
+
+ batch_label = np.zeros((BATCH_SIZE, NUM_POINT))
+ batch_point_index = np.zeros((BATCH_SIZE, NUM_POINT))
+ batch_smpw = np.zeros((BATCH_SIZE, NUM_POINT))
+
+ for sbatch in range(s_batch_num):
+ start_idx = sbatch * BATCH_SIZE
+ end_idx = min((sbatch + 1) * BATCH_SIZE, num_blocks)
+ real_batch_size = end_idx - start_idx
+ batch_data[0:real_batch_size, ...] = scene_data[start_idx:end_idx, ...]
+ batch_label[0:real_batch_size, ...] = scene_label[start_idx:end_idx, ...]
+ batch_point_index[0:real_batch_size, ...] = scene_point_index[start_idx:end_idx, ...]
+ batch_smpw[0:real_batch_size, ...] = scene_smpw[start_idx:end_idx, ...]
+ batch_data[:, :, 3:6] /= 1.0
+
+ torch_data = torch.Tensor(batch_data)
+ torch_data = torch_data.float().cuda()
+ torch_data = torch_data.transpose(2, 1)
+ seg_pred, _ = classifier(torch_data)
+ batch_pred_label = seg_pred.contiguous().cpu().data.max(2)[1].numpy()
+
+ vote_label_pool = add_vote(vote_label_pool, batch_point_index[0:real_batch_size, ...],
+ batch_pred_label[0:real_batch_size, ...],
+ batch_smpw[0:real_batch_size, ...])
+
+ pred_label = np.argmax(vote_label_pool, 1)
+
+ for l in range(NUM_CLASSES):
+ total_seen_class_tmp[l] += np.sum((whole_scene_label == l))
+ total_correct_class_tmp[l] += np.sum((pred_label == l) & (whole_scene_label == l))
+ total_iou_deno_class_tmp[l] += np.sum(((pred_label == l) | (whole_scene_label == l)))
+ total_seen_class[l] += total_seen_class_tmp[l]
+ total_correct_class[l] += total_correct_class_tmp[l]
+ total_iou_deno_class[l] += total_iou_deno_class_tmp[l]
+
+ iou_map = np.array(total_correct_class_tmp) / (np.array(total_iou_deno_class_tmp, dtype=np.float) + 1e-6)
+ print(iou_map)
+ arr = np.array(total_seen_class_tmp)
+ tmp_iou = np.mean(iou_map[arr != 0])
+ log_string('Mean IoU of %s: %.4f' % (scene_id[batch_idx], tmp_iou))
+ print('----------------------------')
+
+ filename = os.path.join(visual_dir, scene_id[batch_idx] + '.txt')
+ with open(filename, 'w') as pl_save:
+ for i in pred_label:
+ pl_save.write(str(int(i)) + '\n')
+ pl_save.close()
+ for i in range(whole_scene_label.shape[0]):
+ color = g_label2color[pred_label[i]]
+ color_gt = g_label2color[whole_scene_label[i]]
+ if args.visual:
+ fout.write('v %f %f %f %d %d %d\n' % (
+ whole_scene_data[i, 0], whole_scene_data[i, 1], whole_scene_data[i, 2], color[0], color[1],
+ color[2]))
+ fout_gt.write(
+ 'v %f %f %f %d %d %d\n' % (
+ whole_scene_data[i, 0], whole_scene_data[i, 1], whole_scene_data[i, 2], color_gt[0],
+ color_gt[1], color_gt[2]))
+ if args.visual:
+ fout.close()
+ fout_gt.close()
+
+ IoU = np.array(total_correct_class) / (np.array(total_iou_deno_class, dtype=np.float) + 1e-6)
+ iou_per_class_str = '------- IoU --------\n'
+ for l in range(NUM_CLASSES):
+ iou_per_class_str += 'class %s, IoU: %.3f \n' % (
+ seg_label_to_cat[l] + ' ' * (14 - len(seg_label_to_cat[l])),
+ total_correct_class[l] / float(total_iou_deno_class[l]))
+ log_string(iou_per_class_str)
+ log_string('eval point avg class IoU: %f' % np.mean(IoU))
+ log_string('eval whole scene point avg class acc: %f' % (
+ np.mean(np.array(total_correct_class) / (np.array(total_seen_class, dtype=np.float) + 1e-6))))
+ log_string('eval whole scene point accuracy: %f' % (
+ np.sum(total_correct_class) / float(np.sum(total_seen_class) + 1e-6)))
+
+ print("Done!")
+
+
+if __name__ == '__main__':
+ args = parse_args()
+ main(args)
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/train_classification.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/train_classification.py
new file mode 100644
index 0000000000..229f49db4a
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/train_classification.py
@@ -0,0 +1,232 @@
+"""
+Author: Benny
+Date: Nov 2019
+"""
+
+import os
+import sys
+import torch
+import numpy as np
+
+import datetime
+import logging
+import provider
+import importlib
+import shutil
+import argparse
+
+from pathlib import Path
+from tqdm import tqdm
+from data_utils.ModelNetDataLoader import ModelNetDataLoader
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+ROOT_DIR = BASE_DIR
+sys.path.append(os.path.join(ROOT_DIR, 'models'))
+
+def parse_args():
+ '''PARAMETERS'''
+ parser = argparse.ArgumentParser('training')
+ parser.add_argument('--use_cpu', action='store_true', default=False, help='use cpu mode')
+ parser.add_argument('--gpu', type=str, default='0', help='specify gpu device')
+ parser.add_argument('--batch_size', type=int, default=24, help='batch size in training')
+ parser.add_argument('--model', default='pointnet_cls', help='model name [default: pointnet_cls]')
+ parser.add_argument('--num_category', default=40, type=int, choices=[10, 40], help='training on ModelNet10/40')
+ parser.add_argument('--epoch', default=200, type=int, help='number of epoch in training')
+ parser.add_argument('--learning_rate', default=0.001, type=float, help='learning rate in training')
+ parser.add_argument('--num_point', type=int, default=1024, help='Point Number')
+ parser.add_argument('--optimizer', type=str, default='Adam', help='optimizer for training')
+ parser.add_argument('--log_dir', type=str, default=None, help='experiment root')
+ parser.add_argument('--decay_rate', type=float, default=1e-4, help='decay rate')
+ parser.add_argument('--use_normals', action='store_true', default=False, help='use normals')
+ parser.add_argument('--process_data', action='store_true', default=False, help='save data offline')
+ parser.add_argument('--use_uniform_sample', action='store_true', default=False, help='use uniform sampiling')
+ return parser.parse_args()
+
+
+def inplace_relu(m):
+ classname = m.__class__.__name__
+ if classname.find('ReLU') != -1:
+ m.inplace=True
+
+
+def test(model, loader, num_class=40):
+ mean_correct = []
+ class_acc = np.zeros((num_class, 3))
+ classifier = model.eval()
+
+ for j, (points, target) in tqdm(enumerate(loader), total=len(loader)):
+
+ if not args.use_cpu:
+ points, target = points.cuda(), target.cuda()
+
+ points = points.transpose(2, 1)
+ pred, _ = classifier(points)
+ pred_choice = pred.data.max(1)[1]
+
+ for cat in np.unique(target.cpu()):
+ classacc = pred_choice[target == cat].eq(target[target == cat].long().data).cpu().sum()
+ class_acc[cat, 0] += classacc.item() / float(points[target == cat].size()[0])
+ class_acc[cat, 1] += 1
+
+ correct = pred_choice.eq(target.long().data).cpu().sum()
+ mean_correct.append(correct.item() / float(points.size()[0]))
+
+ class_acc[:, 2] = class_acc[:, 0] / class_acc[:, 1]
+ class_acc = np.mean(class_acc[:, 2])
+ instance_acc = np.mean(mean_correct)
+
+ return instance_acc, class_acc
+
+
+def main(args):
+ def log_string(str):
+ logger.info(str)
+ print(str)
+
+ '''HYPER PARAMETER'''
+ os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu
+
+ '''CREATE DIR'''
+ timestr = str(datetime.datetime.now().strftime('%Y-%m-%d_%H-%M'))
+ exp_dir = Path('./log/')
+ exp_dir.mkdir(exist_ok=True)
+ exp_dir = exp_dir.joinpath('classification')
+ exp_dir.mkdir(exist_ok=True)
+ if args.log_dir is None:
+ exp_dir = exp_dir.joinpath(timestr)
+ else:
+ exp_dir = exp_dir.joinpath(args.log_dir)
+ exp_dir.mkdir(exist_ok=True)
+ checkpoints_dir = exp_dir.joinpath('checkpoints/')
+ checkpoints_dir.mkdir(exist_ok=True)
+ log_dir = exp_dir.joinpath('logs/')
+ log_dir.mkdir(exist_ok=True)
+
+ '''LOG'''
+ args = parse_args()
+ logger = logging.getLogger("Model")
+ logger.setLevel(logging.INFO)
+ formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+ file_handler = logging.FileHandler('%s/%s.txt' % (log_dir, args.model))
+ file_handler.setLevel(logging.INFO)
+ file_handler.setFormatter(formatter)
+ logger.addHandler(file_handler)
+ log_string('PARAMETER ...')
+ log_string(args)
+
+ '''DATA LOADING'''
+ log_string('Load dataset ...')
+ data_path = 'data/modelnet40_normal_resampled/'
+
+ train_dataset = ModelNetDataLoader(root=data_path, args=args, split='train', process_data=args.process_data)
+ test_dataset = ModelNetDataLoader(root=data_path, args=args, split='test', process_data=args.process_data)
+ trainDataLoader = torch.utils.data.DataLoader(train_dataset, batch_size=args.batch_size, shuffle=True, num_workers=10, drop_last=True)
+ testDataLoader = torch.utils.data.DataLoader(test_dataset, batch_size=args.batch_size, shuffle=False, num_workers=10)
+
+ '''MODEL LOADING'''
+ num_class = args.num_category
+ model = importlib.import_module(args.model)
+ shutil.copy('./models/%s.py' % args.model, str(exp_dir))
+ shutil.copy('models/pointnet2_utils.py', str(exp_dir))
+ shutil.copy('./train_classification.py', str(exp_dir))
+
+ classifier = model.get_model(num_class, normal_channel=args.use_normals)
+ criterion = model.get_loss()
+ classifier.apply(inplace_relu)
+
+ if not args.use_cpu:
+ classifier = classifier.cuda()
+ criterion = criterion.cuda()
+
+ try:
+ checkpoint = torch.load(str(exp_dir) + '/checkpoints/best_model.pth')
+ start_epoch = checkpoint['epoch']
+ classifier.load_state_dict(checkpoint['model_state_dict'])
+ log_string('Use pretrain model')
+ except:
+ log_string('No existing model, starting training from scratch...')
+ start_epoch = 0
+
+ if args.optimizer == 'Adam':
+ optimizer = torch.optim.Adam(
+ classifier.parameters(),
+ lr=args.learning_rate,
+ betas=(0.9, 0.999),
+ eps=1e-08,
+ weight_decay=args.decay_rate
+ )
+ else:
+ optimizer = torch.optim.SGD(classifier.parameters(), lr=0.01, momentum=0.9)
+
+ scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=20, gamma=0.7)
+ global_epoch = 0
+ global_step = 0
+ best_instance_acc = 0.0
+ best_class_acc = 0.0
+
+ '''TRANING'''
+ logger.info('Start training...')
+ for epoch in range(start_epoch, args.epoch):
+ log_string('Epoch %d (%d/%s):' % (global_epoch + 1, epoch + 1, args.epoch))
+ mean_correct = []
+ classifier = classifier.train()
+
+ scheduler.step()
+ for batch_id, (points, target) in tqdm(enumerate(trainDataLoader, 0), total=len(trainDataLoader), smoothing=0.9):
+ optimizer.zero_grad()
+
+ points = points.data.numpy()
+ points = provider.random_point_dropout(points)
+ points[:, :, 0:3] = provider.random_scale_point_cloud(points[:, :, 0:3])
+ points[:, :, 0:3] = provider.shift_point_cloud(points[:, :, 0:3])
+ points = torch.Tensor(points)
+ points = points.transpose(2, 1)
+
+ if not args.use_cpu:
+ points, target = points.cuda(), target.cuda()
+
+ pred, trans_feat = classifier(points)
+ loss = criterion(pred, target.long(), trans_feat)
+ pred_choice = pred.data.max(1)[1]
+
+ correct = pred_choice.eq(target.long().data).cpu().sum()
+ mean_correct.append(correct.item() / float(points.size()[0]))
+ loss.backward()
+ optimizer.step()
+ global_step += 1
+
+ train_instance_acc = np.mean(mean_correct)
+ log_string('Train Instance Accuracy: %f' % train_instance_acc)
+
+ with torch.no_grad():
+ instance_acc, class_acc = test(classifier.eval(), testDataLoader, num_class=num_class)
+
+ if (instance_acc >= best_instance_acc):
+ best_instance_acc = instance_acc
+ best_epoch = epoch + 1
+
+ if (class_acc >= best_class_acc):
+ best_class_acc = class_acc
+ log_string('Test Instance Accuracy: %f, Class Accuracy: %f' % (instance_acc, class_acc))
+ log_string('Best Instance Accuracy: %f, Class Accuracy: %f' % (best_instance_acc, best_class_acc))
+
+ if (instance_acc >= best_instance_acc):
+ logger.info('Save model...')
+ savepath = str(checkpoints_dir) + '/best_model.pth'
+ log_string('Saving at %s' % savepath)
+ state = {
+ 'epoch': best_epoch,
+ 'instance_acc': instance_acc,
+ 'class_acc': class_acc,
+ 'model_state_dict': classifier.state_dict(),
+ 'optimizer_state_dict': optimizer.state_dict(),
+ }
+ torch.save(state, savepath)
+ global_epoch += 1
+
+ logger.info('End of training...')
+
+
+if __name__ == '__main__':
+ args = parse_args()
+ main(args)
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/train_partseg.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/train_partseg.py
new file mode 100644
index 0000000000..bb85e434e5
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/train_partseg.py
@@ -0,0 +1,305 @@
+"""
+Author: Benny
+Date: Nov 2019
+"""
+import argparse
+import os
+import torch
+import datetime
+import logging
+import sys
+import importlib
+import shutil
+import provider
+import numpy as np
+
+from pathlib import Path
+from tqdm import tqdm
+from data_utils.ShapeNetDataLoader import PartNormalDataset
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+ROOT_DIR = BASE_DIR
+sys.path.append(os.path.join(ROOT_DIR, 'models'))
+
+seg_classes = {'Earphone': [16, 17, 18], 'Motorbike': [30, 31, 32, 33, 34, 35], 'Rocket': [41, 42, 43],
+ 'Car': [8, 9, 10, 11], 'Laptop': [28, 29], 'Cap': [6, 7], 'Skateboard': [44, 45, 46], 'Mug': [36, 37],
+ 'Guitar': [19, 20, 21], 'Bag': [4, 5], 'Lamp': [24, 25, 26, 27], 'Table': [47, 48, 49],
+ 'Airplane': [0, 1, 2, 3], 'Pistol': [38, 39, 40], 'Chair': [12, 13, 14, 15], 'Knife': [22, 23]}
+seg_label_to_cat = {} # {0:Airplane, 1:Airplane, ...49:Table}
+for cat in seg_classes.keys():
+ for label in seg_classes[cat]:
+ seg_label_to_cat[label] = cat
+
+
+def inplace_relu(m):
+ classname = m.__class__.__name__
+ if classname.find('ReLU') != -1:
+ m.inplace=True
+
+def to_categorical(y, num_classes):
+ """ 1-hot encodes a tensor """
+ new_y = torch.eye(num_classes)[y.cpu().data.numpy(),]
+ if (y.is_cuda):
+ return new_y.cuda()
+ return new_y
+
+
+def parse_args():
+ parser = argparse.ArgumentParser('Model')
+ parser.add_argument('--model', type=str, default='pointnet_part_seg', help='model name')
+ parser.add_argument('--batch_size', type=int, default=16, help='batch Size during training')
+ parser.add_argument('--epoch', default=251, type=int, help='epoch to run')
+ parser.add_argument('--learning_rate', default=0.001, type=float, help='initial learning rate')
+ parser.add_argument('--gpu', type=str, default='0', help='specify GPU devices')
+ parser.add_argument('--optimizer', type=str, default='Adam', help='Adam or SGD')
+ parser.add_argument('--log_dir', type=str, default=None, help='log path')
+ parser.add_argument('--decay_rate', type=float, default=1e-4, help='weight decay')
+ parser.add_argument('--npoint', type=int, default=2048, help='point Number')
+ parser.add_argument('--normal', action='store_true', default=False, help='use normals')
+ parser.add_argument('--step_size', type=int, default=20, help='decay step for lr decay')
+ parser.add_argument('--lr_decay', type=float, default=0.5, help='decay rate for lr decay')
+
+ return parser.parse_args()
+
+
+def main(args):
+ def log_string(str):
+ logger.info(str)
+ print(str)
+
+ '''HYPER PARAMETER'''
+ os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu
+
+ '''CREATE DIR'''
+ timestr = str(datetime.datetime.now().strftime('%Y-%m-%d_%H-%M'))
+ exp_dir = Path('./log/')
+ exp_dir.mkdir(exist_ok=True)
+ exp_dir = exp_dir.joinpath('part_seg')
+ exp_dir.mkdir(exist_ok=True)
+ if args.log_dir is None:
+ exp_dir = exp_dir.joinpath(timestr)
+ else:
+ exp_dir = exp_dir.joinpath(args.log_dir)
+ exp_dir.mkdir(exist_ok=True)
+ checkpoints_dir = exp_dir.joinpath('checkpoints/')
+ checkpoints_dir.mkdir(exist_ok=True)
+ log_dir = exp_dir.joinpath('logs/')
+ log_dir.mkdir(exist_ok=True)
+
+ '''LOG'''
+ args = parse_args()
+ logger = logging.getLogger("Model")
+ logger.setLevel(logging.INFO)
+ formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+ file_handler = logging.FileHandler('%s/%s.txt' % (log_dir, args.model))
+ file_handler.setLevel(logging.INFO)
+ file_handler.setFormatter(formatter)
+ logger.addHandler(file_handler)
+ log_string('PARAMETER ...')
+ log_string(args)
+
+ root = 'data/shapenetcore_partanno_segmentation_benchmark_v0_normal/'
+
+ TRAIN_DATASET = PartNormalDataset(root=root, npoints=args.npoint, split='trainval', normal_channel=args.normal)
+ trainDataLoader = torch.utils.data.DataLoader(TRAIN_DATASET, batch_size=args.batch_size, shuffle=True, num_workers=10, drop_last=True)
+ TEST_DATASET = PartNormalDataset(root=root, npoints=args.npoint, split='test', normal_channel=args.normal)
+ testDataLoader = torch.utils.data.DataLoader(TEST_DATASET, batch_size=args.batch_size, shuffle=False, num_workers=10)
+ log_string("The number of training data is: %d" % len(TRAIN_DATASET))
+ log_string("The number of test data is: %d" % len(TEST_DATASET))
+
+ num_classes = 16
+ num_part = 50
+
+ '''MODEL LOADING'''
+ MODEL = importlib.import_module(args.model)
+ shutil.copy('models/%s.py' % args.model, str(exp_dir))
+ shutil.copy('models/pointnet2_utils.py', str(exp_dir))
+
+ classifier = MODEL.get_model(num_part, normal_channel=args.normal).cuda()
+ criterion = MODEL.get_loss().cuda()
+ classifier.apply(inplace_relu)
+
+ def weights_init(m):
+ classname = m.__class__.__name__
+ if classname.find('Conv2d') != -1:
+ torch.nn.init.xavier_normal_(m.weight.data)
+ torch.nn.init.constant_(m.bias.data, 0.0)
+ elif classname.find('Linear') != -1:
+ torch.nn.init.xavier_normal_(m.weight.data)
+ torch.nn.init.constant_(m.bias.data, 0.0)
+
+ try:
+ checkpoint = torch.load(str(exp_dir) + '/checkpoints/best_model.pth')
+ start_epoch = checkpoint['epoch']
+ classifier.load_state_dict(checkpoint['model_state_dict'])
+ log_string('Use pretrain model')
+ except:
+ log_string('No existing model, starting training from scratch...')
+ start_epoch = 0
+ classifier = classifier.apply(weights_init)
+
+ if args.optimizer == 'Adam':
+ optimizer = torch.optim.Adam(
+ classifier.parameters(),
+ lr=args.learning_rate,
+ betas=(0.9, 0.999),
+ eps=1e-08,
+ weight_decay=args.decay_rate
+ )
+ else:
+ optimizer = torch.optim.SGD(classifier.parameters(), lr=args.learning_rate, momentum=0.9)
+
+ def bn_momentum_adjust(m, momentum):
+ if isinstance(m, torch.nn.BatchNorm2d) or isinstance(m, torch.nn.BatchNorm1d):
+ m.momentum = momentum
+
+ LEARNING_RATE_CLIP = 1e-5
+ MOMENTUM_ORIGINAL = 0.1
+ MOMENTUM_DECCAY = 0.5
+ MOMENTUM_DECCAY_STEP = args.step_size
+
+ best_acc = 0
+ global_epoch = 0
+ best_class_avg_iou = 0
+ best_inctance_avg_iou = 0
+
+ for epoch in range(start_epoch, args.epoch):
+ mean_correct = []
+
+ log_string('Epoch %d (%d/%s):' % (global_epoch + 1, epoch + 1, args.epoch))
+ '''Adjust learning rate and BN momentum'''
+ lr = max(args.learning_rate * (args.lr_decay ** (epoch // args.step_size)), LEARNING_RATE_CLIP)
+ log_string('Learning rate:%f' % lr)
+ for param_group in optimizer.param_groups:
+ param_group['lr'] = lr
+ momentum = MOMENTUM_ORIGINAL * (MOMENTUM_DECCAY ** (epoch // MOMENTUM_DECCAY_STEP))
+ if momentum < 0.01:
+ momentum = 0.01
+ print('BN momentum updated to: %f' % momentum)
+ classifier = classifier.apply(lambda x: bn_momentum_adjust(x, momentum))
+ classifier = classifier.train()
+
+ '''learning one epoch'''
+ for i, (points, label, target) in tqdm(enumerate(trainDataLoader), total=len(trainDataLoader), smoothing=0.9):
+ optimizer.zero_grad()
+
+ points = points.data.numpy()
+ points[:, :, 0:3] = provider.random_scale_point_cloud(points[:, :, 0:3])
+ points[:, :, 0:3] = provider.shift_point_cloud(points[:, :, 0:3])
+ points = torch.Tensor(points)
+ points, label, target = points.float().cuda(), label.long().cuda(), target.long().cuda()
+ points = points.transpose(2, 1)
+
+ seg_pred, trans_feat = classifier(points, to_categorical(label, num_classes))
+ seg_pred = seg_pred.contiguous().view(-1, num_part)
+ target = target.view(-1, 1)[:, 0]
+ pred_choice = seg_pred.data.max(1)[1]
+
+ correct = pred_choice.eq(target.data).cpu().sum()
+ mean_correct.append(correct.item() / (args.batch_size * args.npoint))
+ loss = criterion(seg_pred, target, trans_feat)
+ loss.backward()
+ optimizer.step()
+
+ train_instance_acc = np.mean(mean_correct)
+ log_string('Train accuracy is: %.5f' % train_instance_acc)
+
+ with torch.no_grad():
+ test_metrics = {}
+ total_correct = 0
+ total_seen = 0
+ total_seen_class = [0 for _ in range(num_part)]
+ total_correct_class = [0 for _ in range(num_part)]
+ shape_ious = {cat: [] for cat in seg_classes.keys()}
+ seg_label_to_cat = {} # {0:Airplane, 1:Airplane, ...49:Table}
+
+ for cat in seg_classes.keys():
+ for label in seg_classes[cat]:
+ seg_label_to_cat[label] = cat
+
+ classifier = classifier.eval()
+
+ for batch_id, (points, label, target) in tqdm(enumerate(testDataLoader), total=len(testDataLoader), smoothing=0.9):
+ cur_batch_size, NUM_POINT, _ = points.size()
+ points, label, target = points.float().cuda(), label.long().cuda(), target.long().cuda()
+ points = points.transpose(2, 1)
+ seg_pred, _ = classifier(points, to_categorical(label, num_classes))
+ cur_pred_val = seg_pred.cpu().data.numpy()
+ cur_pred_val_logits = cur_pred_val
+ cur_pred_val = np.zeros((cur_batch_size, NUM_POINT)).astype(np.int32)
+ target = target.cpu().data.numpy()
+
+ for i in range(cur_batch_size):
+ cat = seg_label_to_cat[target[i, 0]]
+ logits = cur_pred_val_logits[i, :, :]
+ cur_pred_val[i, :] = np.argmax(logits[:, seg_classes[cat]], 1) + seg_classes[cat][0]
+
+ correct = np.sum(cur_pred_val == target)
+ total_correct += correct
+ total_seen += (cur_batch_size * NUM_POINT)
+
+ for l in range(num_part):
+ total_seen_class[l] += np.sum(target == l)
+ total_correct_class[l] += (np.sum((cur_pred_val == l) & (target == l)))
+
+ for i in range(cur_batch_size):
+ segp = cur_pred_val[i, :]
+ segl = target[i, :]
+ cat = seg_label_to_cat[segl[0]]
+ part_ious = [0.0 for _ in range(len(seg_classes[cat]))]
+ for l in seg_classes[cat]:
+ if (np.sum(segl == l) == 0) and (
+ np.sum(segp == l) == 0): # part is not present, no prediction as well
+ part_ious[l - seg_classes[cat][0]] = 1.0
+ else:
+ part_ious[l - seg_classes[cat][0]] = np.sum((segl == l) & (segp == l)) / float(
+ np.sum((segl == l) | (segp == l)))
+ shape_ious[cat].append(np.mean(part_ious))
+
+ all_shape_ious = []
+ for cat in shape_ious.keys():
+ for iou in shape_ious[cat]:
+ all_shape_ious.append(iou)
+ shape_ious[cat] = np.mean(shape_ious[cat])
+ mean_shape_ious = np.mean(list(shape_ious.values()))
+ test_metrics['accuracy'] = total_correct / float(total_seen)
+ test_metrics['class_avg_accuracy'] = np.mean(
+ np.array(total_correct_class) / np.array(total_seen_class, dtype=np.float))
+ for cat in sorted(shape_ious.keys()):
+ log_string('eval mIoU of %s %f' % (cat + ' ' * (14 - len(cat)), shape_ious[cat]))
+ test_metrics['class_avg_iou'] = mean_shape_ious
+ test_metrics['inctance_avg_iou'] = np.mean(all_shape_ious)
+
+ log_string('Epoch %d test Accuracy: %f Class avg mIOU: %f Inctance avg mIOU: %f' % (
+ epoch + 1, test_metrics['accuracy'], test_metrics['class_avg_iou'], test_metrics['inctance_avg_iou']))
+ if (test_metrics['inctance_avg_iou'] >= best_inctance_avg_iou):
+ logger.info('Save model...')
+ savepath = str(checkpoints_dir) + '/best_model.pth'
+ log_string('Saving at %s' % savepath)
+ state = {
+ 'epoch': epoch,
+ 'train_acc': train_instance_acc,
+ 'test_acc': test_metrics['accuracy'],
+ 'class_avg_iou': test_metrics['class_avg_iou'],
+ 'inctance_avg_iou': test_metrics['inctance_avg_iou'],
+ 'model_state_dict': classifier.state_dict(),
+ 'optimizer_state_dict': optimizer.state_dict(),
+ }
+ torch.save(state, savepath)
+ log_string('Saving model....')
+
+ if test_metrics['accuracy'] > best_acc:
+ best_acc = test_metrics['accuracy']
+ if test_metrics['class_avg_iou'] > best_class_avg_iou:
+ best_class_avg_iou = test_metrics['class_avg_iou']
+ if test_metrics['inctance_avg_iou'] > best_inctance_avg_iou:
+ best_inctance_avg_iou = test_metrics['inctance_avg_iou']
+ log_string('Best accuracy is: %.5f' % best_acc)
+ log_string('Best class avg mIOU is: %.5f' % best_class_avg_iou)
+ log_string('Best inctance avg mIOU is: %.5f' % best_inctance_avg_iou)
+ global_epoch += 1
+
+
+if __name__ == '__main__':
+ args = parse_args()
+ main(args)
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/train_semseg.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/train_semseg.py
new file mode 100644
index 0000000000..fd1df5f882
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/train_semseg.py
@@ -0,0 +1,294 @@
+"""
+Author: Benny
+Date: Nov 2019
+"""
+import argparse
+import os
+from data_utils.S3DISDataLoader import S3DISDataset
+import torch
+import datetime
+import logging
+from pathlib import Path
+import sys
+import importlib
+import shutil
+from tqdm import tqdm
+import provider
+import numpy as np
+import time
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+ROOT_DIR = BASE_DIR
+sys.path.append(os.path.join(ROOT_DIR, 'models'))
+
+classes = ['ceiling', 'floor', 'wall', 'beam', 'column', 'window', 'door', 'table', 'chair', 'sofa', 'bookcase',
+ 'board', 'clutter']
+class2label = {cls: i for i, cls in enumerate(classes)}
+seg_classes = class2label
+seg_label_to_cat = {}
+for i, cat in enumerate(seg_classes.keys()):
+ seg_label_to_cat[i] = cat
+
+def inplace_relu(m):
+ classname = m.__class__.__name__
+ if classname.find('ReLU') != -1:
+ m.inplace=True
+
+def parse_args():
+ parser = argparse.ArgumentParser('Model')
+ parser.add_argument('--model', type=str, default='pointnet_sem_seg', help='model name [default: pointnet_sem_seg]')
+ parser.add_argument('--batch_size', type=int, default=16, help='Batch Size during training [default: 16]')
+ parser.add_argument('--epoch', default=32, type=int, help='Epoch to run [default: 32]')
+ parser.add_argument('--learning_rate', default=0.001, type=float, help='Initial learning rate [default: 0.001]')
+ parser.add_argument('--gpu', type=str, default='0', help='GPU to use [default: GPU 0]')
+ parser.add_argument('--optimizer', type=str, default='Adam', help='Adam or SGD [default: Adam]')
+ parser.add_argument('--log_dir', type=str, default=None, help='Log path [default: None]')
+ parser.add_argument('--decay_rate', type=float, default=1e-4, help='weight decay [default: 1e-4]')
+ parser.add_argument('--npoint', type=int, default=4096, help='Point Number [default: 4096]')
+ parser.add_argument('--step_size', type=int, default=10, help='Decay step for lr decay [default: every 10 epochs]')
+ parser.add_argument('--lr_decay', type=float, default=0.7, help='Decay rate for lr decay [default: 0.7]')
+ parser.add_argument('--test_area', type=int, default=5, help='Which area to use for test, option: 1-6 [default: 5]')
+
+ return parser.parse_args()
+
+
+def main(args):
+ def log_string(str):
+ logger.info(str)
+ print(str)
+
+ '''HYPER PARAMETER'''
+ os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu
+
+ '''CREATE DIR'''
+ timestr = str(datetime.datetime.now().strftime('%Y-%m-%d_%H-%M'))
+ experiment_dir = Path('./log/')
+ experiment_dir.mkdir(exist_ok=True)
+ experiment_dir = experiment_dir.joinpath('sem_seg')
+ experiment_dir.mkdir(exist_ok=True)
+ if args.log_dir is None:
+ experiment_dir = experiment_dir.joinpath(timestr)
+ else:
+ experiment_dir = experiment_dir.joinpath(args.log_dir)
+ experiment_dir.mkdir(exist_ok=True)
+ checkpoints_dir = experiment_dir.joinpath('checkpoints/')
+ checkpoints_dir.mkdir(exist_ok=True)
+ log_dir = experiment_dir.joinpath('logs/')
+ log_dir.mkdir(exist_ok=True)
+
+ '''LOG'''
+ args = parse_args()
+ logger = logging.getLogger("Model")
+ logger.setLevel(logging.INFO)
+ formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+ file_handler = logging.FileHandler('%s/%s.txt' % (log_dir, args.model))
+ file_handler.setLevel(logging.INFO)
+ file_handler.setFormatter(formatter)
+ logger.addHandler(file_handler)
+ log_string('PARAMETER ...')
+ log_string(args)
+
+ root = 'data/stanford_indoor3d/'
+ NUM_CLASSES = 13
+ NUM_POINT = args.npoint
+ BATCH_SIZE = args.batch_size
+
+ print("start loading training data ...")
+ TRAIN_DATASET = S3DISDataset(split='train', data_root=root, num_point=NUM_POINT, test_area=args.test_area, block_size=1.0, sample_rate=1.0, transform=None)
+ print("start loading test data ...")
+ TEST_DATASET = S3DISDataset(split='test', data_root=root, num_point=NUM_POINT, test_area=args.test_area, block_size=1.0, sample_rate=1.0, transform=None)
+
+ trainDataLoader = torch.utils.data.DataLoader(TRAIN_DATASET, batch_size=BATCH_SIZE, shuffle=True, num_workers=10,
+ pin_memory=True, drop_last=True,
+ worker_init_fn=lambda x: np.random.seed(x + int(time.time())))
+ testDataLoader = torch.utils.data.DataLoader(TEST_DATASET, batch_size=BATCH_SIZE, shuffle=False, num_workers=10,
+ pin_memory=True, drop_last=True)
+ weights = torch.Tensor(TRAIN_DATASET.labelweights).cuda()
+
+ log_string("The number of training data is: %d" % len(TRAIN_DATASET))
+ log_string("The number of test data is: %d" % len(TEST_DATASET))
+
+ '''MODEL LOADING'''
+ MODEL = importlib.import_module(args.model)
+ shutil.copy('models/%s.py' % args.model, str(experiment_dir))
+ shutil.copy('models/pointnet2_utils.py', str(experiment_dir))
+
+ classifier = MODEL.get_model(NUM_CLASSES).cuda()
+ criterion = MODEL.get_loss().cuda()
+ classifier.apply(inplace_relu)
+
+ def weights_init(m):
+ classname = m.__class__.__name__
+ if classname.find('Conv2d') != -1:
+ torch.nn.init.xavier_normal_(m.weight.data)
+ torch.nn.init.constant_(m.bias.data, 0.0)
+ elif classname.find('Linear') != -1:
+ torch.nn.init.xavier_normal_(m.weight.data)
+ torch.nn.init.constant_(m.bias.data, 0.0)
+
+ try:
+ checkpoint = torch.load(str(experiment_dir) + '/checkpoints/best_model.pth')
+ start_epoch = checkpoint['epoch']
+ classifier.load_state_dict(checkpoint['model_state_dict'])
+ log_string('Use pretrain model')
+ except:
+ log_string('No existing model, starting training from scratch...')
+ start_epoch = 0
+ classifier = classifier.apply(weights_init)
+
+ if args.optimizer == 'Adam':
+ optimizer = torch.optim.Adam(
+ classifier.parameters(),
+ lr=args.learning_rate,
+ betas=(0.9, 0.999),
+ eps=1e-08,
+ weight_decay=args.decay_rate
+ )
+ else:
+ optimizer = torch.optim.SGD(classifier.parameters(), lr=args.learning_rate, momentum=0.9)
+
+ def bn_momentum_adjust(m, momentum):
+ if isinstance(m, torch.nn.BatchNorm2d) or isinstance(m, torch.nn.BatchNorm1d):
+ m.momentum = momentum
+
+ LEARNING_RATE_CLIP = 1e-5
+ MOMENTUM_ORIGINAL = 0.1
+ MOMENTUM_DECCAY = 0.5
+ MOMENTUM_DECCAY_STEP = args.step_size
+
+ global_epoch = 0
+ best_iou = 0
+
+ for epoch in range(start_epoch, args.epoch):
+ '''Train on chopped scenes'''
+ log_string('**** Epoch %d (%d/%s) ****' % (global_epoch + 1, epoch + 1, args.epoch))
+ lr = max(args.learning_rate * (args.lr_decay ** (epoch // args.step_size)), LEARNING_RATE_CLIP)
+ log_string('Learning rate:%f' % lr)
+ for param_group in optimizer.param_groups:
+ param_group['lr'] = lr
+ momentum = MOMENTUM_ORIGINAL * (MOMENTUM_DECCAY ** (epoch // MOMENTUM_DECCAY_STEP))
+ if momentum < 0.01:
+ momentum = 0.01
+ print('BN momentum updated to: %f' % momentum)
+ classifier = classifier.apply(lambda x: bn_momentum_adjust(x, momentum))
+ num_batches = len(trainDataLoader)
+ total_correct = 0
+ total_seen = 0
+ loss_sum = 0
+ classifier = classifier.train()
+
+ for i, (points, target) in tqdm(enumerate(trainDataLoader), total=len(trainDataLoader), smoothing=0.9):
+ optimizer.zero_grad()
+
+ points = points.data.numpy()
+ points[:, :, :3] = provider.rotate_point_cloud_z(points[:, :, :3])
+ points = torch.Tensor(points)
+ points, target = points.float().cuda(), target.long().cuda()
+ points = points.transpose(2, 1)
+
+ seg_pred, trans_feat = classifier(points)
+ seg_pred = seg_pred.contiguous().view(-1, NUM_CLASSES)
+
+ batch_label = target.view(-1, 1)[:, 0].cpu().data.numpy()
+ target = target.view(-1, 1)[:, 0]
+ loss = criterion(seg_pred, target, trans_feat, weights)
+ loss.backward()
+ optimizer.step()
+
+ pred_choice = seg_pred.cpu().data.max(1)[1].numpy()
+ correct = np.sum(pred_choice == batch_label)
+ total_correct += correct
+ total_seen += (BATCH_SIZE * NUM_POINT)
+ loss_sum += loss
+ log_string('Training mean loss: %f' % (loss_sum / num_batches))
+ log_string('Training accuracy: %f' % (total_correct / float(total_seen)))
+
+ if epoch % 5 == 0:
+ logger.info('Save model...')
+ savepath = str(checkpoints_dir) + '/model.pth'
+ log_string('Saving at %s' % savepath)
+ state = {
+ 'epoch': epoch,
+ 'model_state_dict': classifier.state_dict(),
+ 'optimizer_state_dict': optimizer.state_dict(),
+ }
+ torch.save(state, savepath)
+ log_string('Saving model....')
+
+ '''Evaluate on chopped scenes'''
+ with torch.no_grad():
+ num_batches = len(testDataLoader)
+ total_correct = 0
+ total_seen = 0
+ loss_sum = 0
+ labelweights = np.zeros(NUM_CLASSES)
+ total_seen_class = [0 for _ in range(NUM_CLASSES)]
+ total_correct_class = [0 for _ in range(NUM_CLASSES)]
+ total_iou_deno_class = [0 for _ in range(NUM_CLASSES)]
+ classifier = classifier.eval()
+
+ log_string('---- EPOCH %03d EVALUATION ----' % (global_epoch + 1))
+ for i, (points, target) in tqdm(enumerate(testDataLoader), total=len(testDataLoader), smoothing=0.9):
+ points = points.data.numpy()
+ points = torch.Tensor(points)
+ points, target = points.float().cuda(), target.long().cuda()
+ points = points.transpose(2, 1)
+
+ seg_pred, trans_feat = classifier(points)
+ pred_val = seg_pred.contiguous().cpu().data.numpy()
+ seg_pred = seg_pred.contiguous().view(-1, NUM_CLASSES)
+
+ batch_label = target.cpu().data.numpy()
+ target = target.view(-1, 1)[:, 0]
+ loss = criterion(seg_pred, target, trans_feat, weights)
+ loss_sum += loss
+ pred_val = np.argmax(pred_val, 2)
+ correct = np.sum((pred_val == batch_label))
+ total_correct += correct
+ total_seen += (BATCH_SIZE * NUM_POINT)
+ tmp, _ = np.histogram(batch_label, range(NUM_CLASSES + 1))
+ labelweights += tmp
+
+ for l in range(NUM_CLASSES):
+ total_seen_class[l] += np.sum((batch_label == l))
+ total_correct_class[l] += np.sum((pred_val == l) & (batch_label == l))
+ total_iou_deno_class[l] += np.sum(((pred_val == l) | (batch_label == l)))
+
+ labelweights = labelweights.astype(np.float32) / np.sum(labelweights.astype(np.float32))
+ mIoU = np.mean(np.array(total_correct_class) / (np.array(total_iou_deno_class, dtype=np.float) + 1e-6))
+ log_string('eval mean loss: %f' % (loss_sum / float(num_batches)))
+ log_string('eval point avg class IoU: %f' % (mIoU))
+ log_string('eval point accuracy: %f' % (total_correct / float(total_seen)))
+ log_string('eval point avg class acc: %f' % (
+ np.mean(np.array(total_correct_class) / (np.array(total_seen_class, dtype=np.float) + 1e-6))))
+
+ iou_per_class_str = '------- IoU --------\n'
+ for l in range(NUM_CLASSES):
+ iou_per_class_str += 'class %s weight: %.3f, IoU: %.3f \n' % (
+ seg_label_to_cat[l] + ' ' * (14 - len(seg_label_to_cat[l])), labelweights[l - 1],
+ total_correct_class[l] / float(total_iou_deno_class[l]))
+
+ log_string(iou_per_class_str)
+ log_string('Eval mean loss: %f' % (loss_sum / num_batches))
+ log_string('Eval accuracy: %f' % (total_correct / float(total_seen)))
+
+ if mIoU >= best_iou:
+ best_iou = mIoU
+ logger.info('Save model...')
+ savepath = str(checkpoints_dir) + '/best_model.pth'
+ log_string('Saving at %s' % savepath)
+ state = {
+ 'epoch': epoch,
+ 'class_avg_iou': mIoU,
+ 'model_state_dict': classifier.state_dict(),
+ 'optimizer_state_dict': optimizer.state_dict(),
+ }
+ torch.save(state, savepath)
+ log_string('Saving model....')
+ log_string('Best mIoU: %f' % best_iou)
+ global_epoch += 1
+
+
+if __name__ == '__main__':
+ args = parse_args()
+ main(args)
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/build.sh b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/build.sh
new file mode 100644
index 0000000000..3bceac2a83
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/build.sh
@@ -0,0 +1 @@
+g++ -std=c++11 render_balls_so.cpp -o render_balls_so.so -shared -fPIC -O2 -D_GLIBCXX_USE_CXX11_ABI=0
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/eulerangles.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/eulerangles.py
new file mode 100644
index 0000000000..dbcad5f1fe
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/eulerangles.py
@@ -0,0 +1,359 @@
+# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
+#
+# See COPYING file distributed along with the NiBabel package for the
+# copyright and license terms.
+#
+### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
+''' Module implementing Euler angle rotations and their conversions
+See:
+* http://en.wikipedia.org/wiki/Rotation_matrix
+* http://en.wikipedia.org/wiki/Euler_angles
+* http://mathworld.wolfram.com/EulerAngles.html
+See also: *Representing Attitude with Euler Angles and Quaternions: A
+Reference* (2006) by James Diebel. A cached PDF link last found here:
+http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.110.5134
+Euler's rotation theorem tells us that any rotation in 3D can be
+described by 3 angles. Let's call the 3 angles the *Euler angle vector*
+and call the angles in the vector :math:`alpha`, :math:`beta` and
+:math:`gamma`. The vector is [ :math:`alpha`,
+:math:`beta`. :math:`gamma` ] and, in this description, the order of the
+parameters specifies the order in which the rotations occur (so the
+rotation corresponding to :math:`alpha` is applied first).
+In order to specify the meaning of an *Euler angle vector* we need to
+specify the axes around which each of the rotations corresponding to
+:math:`alpha`, :math:`beta` and :math:`gamma` will occur.
+There are therefore three axes for the rotations :math:`alpha`,
+:math:`beta` and :math:`gamma`; let's call them :math:`i` :math:`j`,
+:math:`k`.
+Let us express the rotation :math:`alpha` around axis `i` as a 3 by 3
+rotation matrix `A`. Similarly :math:`beta` around `j` becomes 3 x 3
+matrix `B` and :math:`gamma` around `k` becomes matrix `G`. Then the
+whole rotation expressed by the Euler angle vector [ :math:`alpha`,
+:math:`beta`. :math:`gamma` ], `R` is given by::
+ R = np.dot(G, np.dot(B, A))
+See http://mathworld.wolfram.com/EulerAngles.html
+The order :math:`G B A` expresses the fact that the rotations are
+performed in the order of the vector (:math:`alpha` around axis `i` =
+`A` first).
+To convert a given Euler angle vector to a meaningful rotation, and a
+rotation matrix, we need to define:
+* the axes `i`, `j`, `k`
+* whether a rotation matrix should be applied on the left of a vector to
+ be transformed (vectors are column vectors) or on the right (vectors
+ are row vectors).
+* whether the rotations move the axes as they are applied (intrinsic
+ rotations) - compared the situation where the axes stay fixed and the
+ vectors move within the axis frame (extrinsic)
+* the handedness of the coordinate system
+See: http://en.wikipedia.org/wiki/Rotation_matrix#Ambiguities
+We are using the following conventions:
+* axes `i`, `j`, `k` are the `z`, `y`, and `x` axes respectively. Thus
+ an Euler angle vector [ :math:`alpha`, :math:`beta`. :math:`gamma` ]
+ in our convention implies a :math:`alpha` radian rotation around the
+ `z` axis, followed by a :math:`beta` rotation around the `y` axis,
+ followed by a :math:`gamma` rotation around the `x` axis.
+* the rotation matrix applies on the left, to column vectors on the
+ right, so if `R` is the rotation matrix, and `v` is a 3 x N matrix
+ with N column vectors, the transformed vector set `vdash` is given by
+ ``vdash = np.dot(R, v)``.
+* extrinsic rotations - the axes are fixed, and do not move with the
+ rotations.
+* a right-handed coordinate system
+The convention of rotation around ``z``, followed by rotation around
+``y``, followed by rotation around ``x``, is known (confusingly) as
+"xyz", pitch-roll-yaw, Cardan angles, or Tait-Bryan angles.
+'''
+
+import math
+
+import sys
+if sys.version_info >= (3,0):
+ from functools import reduce
+
+import numpy as np
+
+
+_FLOAT_EPS_4 = np.finfo(float).eps * 4.0
+
+
+def euler2mat(z=0, y=0, x=0):
+ ''' Return matrix for rotations around z, y and x axes
+ Uses the z, then y, then x convention above
+ Parameters
+ ----------
+ z : scalar
+ Rotation angle in radians around z-axis (performed first)
+ y : scalar
+ Rotation angle in radians around y-axis
+ x : scalar
+ Rotation angle in radians around x-axis (performed last)
+ Returns
+ -------
+ M : array shape (3,3)
+ Rotation matrix giving same rotation as for given angles
+ Examples
+ --------
+ >>> zrot = 1.3 # radians
+ >>> yrot = -0.1
+ >>> xrot = 0.2
+ >>> M = euler2mat(zrot, yrot, xrot)
+ >>> M.shape == (3, 3)
+ True
+ The output rotation matrix is equal to the composition of the
+ individual rotations
+ >>> M1 = euler2mat(zrot)
+ >>> M2 = euler2mat(0, yrot)
+ >>> M3 = euler2mat(0, 0, xrot)
+ >>> composed_M = np.dot(M3, np.dot(M2, M1))
+ >>> np.allclose(M, composed_M)
+ True
+ You can specify rotations by named arguments
+ >>> np.all(M3 == euler2mat(x=xrot))
+ True
+ When applying M to a vector, the vector should column vector to the
+ right of M. If the right hand side is a 2D array rather than a
+ vector, then each column of the 2D array represents a vector.
+ >>> vec = np.array([1, 0, 0]).reshape((3,1))
+ >>> v2 = np.dot(M, vec)
+ >>> vecs = np.array([[1, 0, 0],[0, 1, 0]]).T # giving 3x2 array
+ >>> vecs2 = np.dot(M, vecs)
+ Rotations are counter-clockwise.
+ >>> zred = np.dot(euler2mat(z=np.pi/2), np.eye(3))
+ >>> np.allclose(zred, [[0, -1, 0],[1, 0, 0], [0, 0, 1]])
+ True
+ >>> yred = np.dot(euler2mat(y=np.pi/2), np.eye(3))
+ >>> np.allclose(yred, [[0, 0, 1],[0, 1, 0], [-1, 0, 0]])
+ True
+ >>> xred = np.dot(euler2mat(x=np.pi/2), np.eye(3))
+ >>> np.allclose(xred, [[1, 0, 0],[0, 0, -1], [0, 1, 0]])
+ True
+ Notes
+ -----
+ The direction of rotation is given by the right-hand rule (orient
+ the thumb of the right hand along the axis around which the rotation
+ occurs, with the end of the thumb at the positive end of the axis;
+ curl your fingers; the direction your fingers curl is the direction
+ of rotation). Therefore, the rotations are counterclockwise if
+ looking along the axis of rotation from positive to negative.
+ '''
+ Ms = []
+ if z:
+ cosz = math.cos(z)
+ sinz = math.sin(z)
+ Ms.append(np.array(
+ [[cosz, -sinz, 0],
+ [sinz, cosz, 0],
+ [0, 0, 1]]))
+ if y:
+ cosy = math.cos(y)
+ siny = math.sin(y)
+ Ms.append(np.array(
+ [[cosy, 0, siny],
+ [0, 1, 0],
+ [-siny, 0, cosy]]))
+ if x:
+ cosx = math.cos(x)
+ sinx = math.sin(x)
+ Ms.append(np.array(
+ [[1, 0, 0],
+ [0, cosx, -sinx],
+ [0, sinx, cosx]]))
+ if Ms:
+ return reduce(np.dot, Ms[::-1])
+ return np.eye(3)
+
+
+def mat2euler(M, cy_thresh=None):
+ ''' Discover Euler angle vector from 3x3 matrix
+ Uses the conventions above.
+ Parameters
+ ----------
+ M : array-like, shape (3,3)
+ cy_thresh : None or scalar, optional
+ threshold below which to give up on straightforward arctan for
+ estimating x rotation. If None (default), estimate from
+ precision of input.
+ Returns
+ -------
+ z : scalar
+ y : scalar
+ x : scalar
+ Rotations in radians around z, y, x axes, respectively
+ Notes
+ -----
+ If there was no numerical error, the routine could be derived using
+ Sympy expression for z then y then x rotation matrix, which is::
+ [ cos(y)*cos(z), -cos(y)*sin(z), sin(y)],
+ [cos(x)*sin(z) + cos(z)*sin(x)*sin(y), cos(x)*cos(z) - sin(x)*sin(y)*sin(z), -cos(y)*sin(x)],
+ [sin(x)*sin(z) - cos(x)*cos(z)*sin(y), cos(z)*sin(x) + cos(x)*sin(y)*sin(z), cos(x)*cos(y)]
+ with the obvious derivations for z, y, and x
+ z = atan2(-r12, r11)
+ y = asin(r13)
+ x = atan2(-r23, r33)
+ Problems arise when cos(y) is close to zero, because both of::
+ z = atan2(cos(y)*sin(z), cos(y)*cos(z))
+ x = atan2(cos(y)*sin(x), cos(x)*cos(y))
+ will be close to atan2(0, 0), and highly unstable.
+ The ``cy`` fix for numerical instability below is from: *Graphics
+ Gems IV*, Paul Heckbert (editor), Academic Press, 1994, ISBN:
+ 0123361559. Specifically it comes from EulerAngles.c by Ken
+ Shoemake, and deals with the case where cos(y) is close to zero:
+ See: http://www.graphicsgems.org/
+ The code appears to be licensed (from the website) as "can be used
+ without restrictions".
+ '''
+ M = np.asarray(M)
+ if cy_thresh is None:
+ try:
+ cy_thresh = np.finfo(M.dtype).eps * 4
+ except ValueError:
+ cy_thresh = _FLOAT_EPS_4
+ r11, r12, r13, r21, r22, r23, r31, r32, r33 = M.flat
+ # cy: sqrt((cos(y)*cos(z))**2 + (cos(x)*cos(y))**2)
+ cy = math.sqrt(r33*r33 + r23*r23)
+ if cy > cy_thresh: # cos(y) not close to zero, standard form
+ z = math.atan2(-r12, r11) # atan2(cos(y)*sin(z), cos(y)*cos(z))
+ y = math.atan2(r13, cy) # atan2(sin(y), cy)
+ x = math.atan2(-r23, r33) # atan2(cos(y)*sin(x), cos(x)*cos(y))
+ else: # cos(y) (close to) zero, so x -> 0.0 (see above)
+ # so r21 -> sin(z), r22 -> cos(z) and
+ z = math.atan2(r21, r22)
+ y = math.atan2(r13, cy) # atan2(sin(y), cy)
+ x = 0.0
+ return z, y, x
+
+
+def euler2quat(z=0, y=0, x=0):
+ ''' Return quaternion corresponding to these Euler angles
+ Uses the z, then y, then x convention above
+ Parameters
+ ----------
+ z : scalar
+ Rotation angle in radians around z-axis (performed first)
+ y : scalar
+ Rotation angle in radians around y-axis
+ x : scalar
+ Rotation angle in radians around x-axis (performed last)
+ Returns
+ -------
+ quat : array shape (4,)
+ Quaternion in w, x, y z (real, then vector) format
+ Notes
+ -----
+ We can derive this formula in Sympy using:
+ 1. Formula giving quaternion corresponding to rotation of theta radians
+ about arbitrary axis:
+ http://mathworld.wolfram.com/EulerParameters.html
+ 2. Generated formulae from 1.) for quaternions corresponding to
+ theta radians rotations about ``x, y, z`` axes
+ 3. Apply quaternion multiplication formula -
+ http://en.wikipedia.org/wiki/Quaternions#Hamilton_product - to
+ formulae from 2.) to give formula for combined rotations.
+ '''
+ z = z/2.0
+ y = y/2.0
+ x = x/2.0
+ cz = math.cos(z)
+ sz = math.sin(z)
+ cy = math.cos(y)
+ sy = math.sin(y)
+ cx = math.cos(x)
+ sx = math.sin(x)
+ return np.array([
+ cx*cy*cz - sx*sy*sz,
+ cx*sy*sz + cy*cz*sx,
+ cx*cz*sy - sx*cy*sz,
+ cx*cy*sz + sx*cz*sy])
+
+
+def quat2euler(q):
+ ''' Return Euler angles corresponding to quaternion `q`
+ Parameters
+ ----------
+ q : 4 element sequence
+ w, x, y, z of quaternion
+ Returns
+ -------
+ z : scalar
+ Rotation angle in radians around z-axis (performed first)
+ y : scalar
+ Rotation angle in radians around y-axis
+ x : scalar
+ Rotation angle in radians around x-axis (performed last)
+ Notes
+ -----
+ It's possible to reduce the amount of calculation a little, by
+ combining parts of the ``quat2mat`` and ``mat2euler`` functions, but
+ the reduction in computation is small, and the code repetition is
+ large.
+ '''
+ # delayed import to avoid cyclic dependencies
+ import nibabel.quaternions as nq
+ return mat2euler(nq.quat2mat(q))
+
+
+def euler2angle_axis(z=0, y=0, x=0):
+ ''' Return angle, axis corresponding to these Euler angles
+ Uses the z, then y, then x convention above
+ Parameters
+ ----------
+ z : scalar
+ Rotation angle in radians around z-axis (performed first)
+ y : scalar
+ Rotation angle in radians around y-axis
+ x : scalar
+ Rotation angle in radians around x-axis (performed last)
+ Returns
+ -------
+ theta : scalar
+ angle of rotation
+ vector : array shape (3,)
+ axis around which rotation occurs
+ Examples
+ --------
+ >>> theta, vec = euler2angle_axis(0, 1.5, 0)
+ >>> print(theta)
+ 1.5
+ >>> np.allclose(vec, [0, 1, 0])
+ True
+ '''
+ # delayed import to avoid cyclic dependencies
+ import nibabel.quaternions as nq
+ return nq.quat2angle_axis(euler2quat(z, y, x))
+
+
+def angle_axis2euler(theta, vector, is_normalized=False):
+ ''' Convert angle, axis pair to Euler angles
+ Parameters
+ ----------
+ theta : scalar
+ angle of rotation
+ vector : 3 element sequence
+ vector specifying axis for rotation.
+ is_normalized : bool, optional
+ True if vector is already normalized (has norm of 1). Default
+ False
+ Returns
+ -------
+ z : scalar
+ y : scalar
+ x : scalar
+ Rotations in radians around z, y, x axes, respectively
+ Examples
+ --------
+ >>> z, y, x = angle_axis2euler(0, [1, 0, 0])
+ >>> np.allclose((z, y, x), 0)
+ True
+ Notes
+ -----
+ It's possible to reduce the amount of calculation a little, by
+ combining parts of the ``angle_axis2mat`` and ``mat2euler``
+ functions, but the reduction in computation is small, and the code
+ repetition is large.
+ '''
+ # delayed import to avoid cyclic dependencies
+ import nibabel.quaternions as nq
+ M = nq.angle_axis2mat(theta, vector, is_normalized)
+ return mat2euler(M)
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/pc_utils.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/pc_utils.py
new file mode 100644
index 0000000000..20051cb5d1
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/pc_utils.py
@@ -0,0 +1,211 @@
+""" Utility functions for processing point clouds.
+Author: Charles R. Qi, Hao Su
+Date: November 2016
+"""
+
+import os
+import sys
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+sys.path.append(BASE_DIR)
+
+# Draw point cloud
+from visualizer.eulerangles import euler2mat
+
+# Point cloud IO
+import numpy as np
+from visualizer.plyfile import PlyData, PlyElement
+
+# ----------------------------------------
+# Point Cloud/Volume Conversions
+# ----------------------------------------
+
+def point_cloud_to_volume_batch(point_clouds, vsize=12, radius=1.0, flatten=True):
+ """ Input is BxNx3 batch of point cloud
+ Output is Bx(vsize^3)
+ """
+ vol_list = []
+ for b in range(point_clouds.shape[0]):
+ vol = point_cloud_to_volume(np.squeeze(point_clouds[b, :, :]), vsize, radius)
+ if flatten:
+ vol_list.append(vol.flatten())
+ else:
+ vol_list.append(np.expand_dims(np.expand_dims(vol, -1), 0))
+ if flatten:
+ return np.vstack(vol_list)
+ else:
+ return np.concatenate(vol_list, 0)
+
+
+def point_cloud_to_volume(points, vsize, radius=1.0):
+ """ input is Nx3 points.
+ output is vsize*vsize*vsize
+ assumes points are in range [-radius, radius]
+ """
+ vol = np.zeros((vsize, vsize, vsize))
+ voxel = 2 * radius / float(vsize)
+ locations = (points + radius) / voxel
+ locations = locations.astype(int)
+ vol[locations[:, 0], locations[:, 1], locations[:, 2]] = 1.0
+ return vol
+
+
+# a = np.zeros((16,1024,3))
+# print point_cloud_to_volume_batch(a, 12, 1.0, False).shape
+
+def volume_to_point_cloud(vol):
+ """ vol is occupancy grid (value = 0 or 1) of size vsize*vsize*vsize
+ return Nx3 numpy array.
+ """
+ vsize = vol.shape[0]
+ assert (vol.shape[1] == vsize and vol.shape[1] == vsize)
+ points = []
+ for a in range(vsize):
+ for b in range(vsize):
+ for c in range(vsize):
+ if vol[a, b, c] == 1:
+ points.append(np.array([a, b, c]))
+ if len(points) == 0:
+ return np.zeros((0, 3))
+ points = np.vstack(points)
+ return points
+
+
+# ----------------------------------------
+# Point cloud IO
+# ----------------------------------------
+
+def read_ply(filename):
+ """ read XYZ point cloud from filename PLY file """
+ plydata = PlyData.read(filename)
+ pc = plydata['vertex'].data
+ pc_array = np.array([[x, y, z] for x, y, z in pc])
+ return pc_array
+
+
+def write_ply(points, filename, text=True):
+ """ input: Nx3, write points to filename as PLY format. """
+ points = [(points[i, 0], points[i, 1], points[i, 2]) for i in range(points.shape[0])]
+ vertex = np.array(points, dtype=[('x', 'f4'), ('y', 'f4'), ('z', 'f4')])
+ el = PlyElement.describe(vertex, 'vertex', comments=['vertices'])
+ PlyData([el], text=text).write(filename)
+
+
+# ----------------------------------------
+# Simple Point cloud and Volume Renderers
+# ----------------------------------------
+
+def draw_point_cloud(input_points, canvasSize=500, space=200, diameter=25,
+ xrot=0, yrot=0, zrot=0, switch_xyz=[0, 1, 2], normalize=True):
+ """ Render point cloud to image with alpha channel.
+ Input:
+ points: Nx3 numpy array (+y is up direction)
+ Output:
+ gray image as numpy array of size canvasSizexcanvasSize
+ """
+ image = np.zeros((canvasSize, canvasSize))
+ if input_points is None or input_points.shape[0] == 0:
+ return image
+
+ points = input_points[:, switch_xyz]
+ M = euler2mat(zrot, yrot, xrot)
+ points = (np.dot(M, points.transpose())).transpose()
+
+ # Normalize the point cloud
+ # We normalize scale to fit points in a unit sphere
+ if normalize:
+ centroid = np.mean(points, axis=0)
+ points -= centroid
+ furthest_distance = np.max(np.sqrt(np.sum(abs(points) ** 2, axis=-1)))
+ points /= furthest_distance
+
+ # Pre-compute the Gaussian disk
+ radius = (diameter - 1) / 2.0
+ disk = np.zeros((diameter, diameter))
+ for i in range(diameter):
+ for j in range(diameter):
+ if (i - radius) * (i - radius) + (j - radius) * (j - radius) <= radius * radius:
+ disk[i, j] = np.exp((-(i - radius) ** 2 - (j - radius) ** 2) / (radius ** 2))
+ mask = np.argwhere(disk > 0)
+ dx = mask[:, 0]
+ dy = mask[:, 1]
+ dv = disk[disk > 0]
+
+ # Order points by z-buffer
+ zorder = np.argsort(points[:, 2])
+ points = points[zorder, :]
+ points[:, 2] = (points[:, 2] - np.min(points[:, 2])) / (np.max(points[:, 2] - np.min(points[:, 2])))
+ max_depth = np.max(points[:, 2])
+
+ for i in range(points.shape[0]):
+ j = points.shape[0] - i - 1
+ x = points[j, 0]
+ y = points[j, 1]
+ xc = canvasSize / 2 + (x * space)
+ yc = canvasSize / 2 + (y * space)
+ xc = int(np.round(xc))
+ yc = int(np.round(yc))
+
+ px = dx + xc
+ py = dy + yc
+
+ image[px, py] = image[px, py] * 0.7 + dv * (max_depth - points[j, 2]) * 0.3
+
+ image = image / np.max(image)
+ return image
+
+
+def point_cloud_three_views(points):
+ """ input points Nx3 numpy array (+y is up direction).
+ return an numpy array gray image of size 500x1500. """
+ # +y is up direction
+ # xrot is azimuth
+ # yrot is in-plane
+ # zrot is elevation
+ img1 = draw_point_cloud(points, zrot=110 / 180.0 * np.pi, xrot=45 / 180.0 * np.pi, yrot=0 / 180.0 * np.pi)
+ img2 = draw_point_cloud(points, zrot=70 / 180.0 * np.pi, xrot=135 / 180.0 * np.pi, yrot=0 / 180.0 * np.pi)
+ img3 = draw_point_cloud(points, zrot=180.0 / 180.0 * np.pi, xrot=90 / 180.0 * np.pi, yrot=0 / 180.0 * np.pi)
+ image_large = np.concatenate([img1, img2, img3], 1)
+ return image_large
+
+
+from PIL import Image
+
+
+def point_cloud_three_views_demo():
+ """ Demo for draw_point_cloud function """
+ DATA_PATH = '../data/ShapeNet/'
+ train_data, _, _, _, _, _ = load_data(DATA_PATH,classification=False)
+ points = train_data[1]
+ im_array = point_cloud_three_views(points)
+ img = Image.fromarray(np.uint8(im_array * 255.0))
+ img.save('example.jpg')
+
+
+if __name__ == "__main__":
+ from data_utils.ShapeNetDataLoader import load_data
+ point_cloud_three_views_demo()
+
+import matplotlib.pyplot as plt
+
+
+def pyplot_draw_point_cloud(points, output_filename):
+ """ points is a Nx3 numpy array """
+ fig = plt.figure()
+ ax = fig.add_subplot(111, projection='3d')
+ ax.scatter(points[:, 0], points[:, 1], points[:, 2])
+ ax.set_xlabel('x')
+ ax.set_ylabel('y')
+ ax.set_zlabel('z')
+ # savefig(output_filename)
+
+
+def pyplot_draw_volume(vol, output_filename):
+ """ vol is of size vsize*vsize*vsize
+ output an image to output_filename
+ """
+ points = volume_to_point_cloud(vol)
+ pyplot_draw_point_cloud(points, output_filename)
+
+
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/pic.png b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/pic.png
new file mode 100644
index 0000000000..28d0ae56af
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/pic.png differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/pic2.png b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/pic2.png
new file mode 100644
index 0000000000..f1f6d93e12
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/pic2.png differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/plyfile.py b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/plyfile.py
new file mode 100644
index 0000000000..fa64bcfccc
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/plyfile.py
@@ -0,0 +1,875 @@
+# Copyright 2014 Darsh Ranjan
+#
+# This file is part of python-plyfile.
+#
+# python-plyfile is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# python-plyfile is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with python-plyfile. If not, see
+# .
+
+from itertools import islice as _islice
+
+import numpy as _np
+from sys import byteorder as _byteorder
+
+
+try:
+ _range = xrange
+except NameError:
+ _range = range
+
+
+# Many-many relation
+_data_type_relation = [
+ ('int8', 'i1'),
+ ('char', 'i1'),
+ ('uint8', 'u1'),
+ ('uchar', 'b1'),
+ ('uchar', 'u1'),
+ ('int16', 'i2'),
+ ('short', 'i2'),
+ ('uint16', 'u2'),
+ ('ushort', 'u2'),
+ ('int32', 'i4'),
+ ('int', 'i4'),
+ ('uint32', 'u4'),
+ ('uint', 'u4'),
+ ('float32', 'f4'),
+ ('float', 'f4'),
+ ('float64', 'f8'),
+ ('double', 'f8')
+]
+
+_data_types = dict(_data_type_relation)
+_data_type_reverse = dict((b, a) for (a, b) in _data_type_relation)
+
+_types_list = []
+_types_set = set()
+for (_a, _b) in _data_type_relation:
+ if _a not in _types_set:
+ _types_list.append(_a)
+ _types_set.add(_a)
+ if _b not in _types_set:
+ _types_list.append(_b)
+ _types_set.add(_b)
+
+
+_byte_order_map = {
+ 'ascii': '=',
+ 'binary_little_endian': '<',
+ 'binary_big_endian': '>'
+}
+
+_byte_order_reverse = {
+ '<': 'binary_little_endian',
+ '>': 'binary_big_endian'
+}
+
+_native_byte_order = {'little': '<', 'big': '>'}[_byteorder]
+
+
+def _lookup_type(type_str):
+ if type_str not in _data_type_reverse:
+ try:
+ type_str = _data_types[type_str]
+ except KeyError:
+ raise ValueError("field type %r not in %r" %
+ (type_str, _types_list))
+
+ return _data_type_reverse[type_str]
+
+
+def _split_line(line, n):
+ fields = line.split(None, n)
+ if len(fields) == n:
+ fields.append('')
+
+ assert len(fields) == n + 1
+
+ return fields
+
+
+def make2d(array, cols=None, dtype=None):
+ '''
+ Make a 2D array from an array of arrays. The `cols' and `dtype'
+ arguments can be omitted if the array is not empty.
+ '''
+ if (cols is None or dtype is None) and not len(array):
+ raise RuntimeError("cols and dtype must be specified for empty "
+ "array")
+
+ if cols is None:
+ cols = len(array[0])
+
+ if dtype is None:
+ dtype = array[0].dtype
+
+ return _np.fromiter(array, [('_', dtype, (cols,))],
+ count=len(array))['_']
+
+
+class PlyParseError(Exception):
+
+ '''
+ Raised when a PLY file cannot be parsed.
+ The attributes `element', `row', `property', and `message' give
+ additional information.
+ '''
+
+ def __init__(self, message, element=None, row=None, prop=None):
+ self.message = message
+ self.element = element
+ self.row = row
+ self.prop = prop
+
+ s = ''
+ if self.element:
+ s += 'element %r: ' % self.element.name
+ if self.row is not None:
+ s += 'row %d: ' % self.row
+ if self.prop:
+ s += 'property %r: ' % self.prop.name
+ s += self.message
+
+ Exception.__init__(self, s)
+
+ def __repr__(self):
+ return ('PlyParseError(%r, element=%r, row=%r, prop=%r)' %
+ self.message, self.element, self.row, self.prop)
+
+
+class PlyData(object):
+
+ '''
+ PLY file header and data.
+ A PlyData instance is created in one of two ways: by the static
+ method PlyData.read (to read a PLY file), or directly from __init__
+ given a sequence of elements (which can then be written to a PLY
+ file).
+ '''
+
+ def __init__(self, elements=[], text=False, byte_order='=',
+ comments=[], obj_info=[]):
+ '''
+ elements: sequence of PlyElement instances.
+ text: whether the resulting PLY file will be text (True) or
+ binary (False).
+ byte_order: '<' for little-endian, '>' for big-endian, or '='
+ for native. This is only relevant if `text' is False.
+ comments: sequence of strings that will be placed in the header
+ between the 'ply' and 'format ...' lines.
+ obj_info: like comments, but will be placed in the header with
+ "obj_info ..." instead of "comment ...".
+ '''
+ if byte_order == '=' and not text:
+ byte_order = _native_byte_order
+
+ self.byte_order = byte_order
+ self.text = text
+
+ self.comments = list(comments)
+ self.obj_info = list(obj_info)
+ self.elements = elements
+
+ def _get_elements(self):
+ return self._elements
+
+ def _set_elements(self, elements):
+ self._elements = tuple(elements)
+ self._index()
+
+ elements = property(_get_elements, _set_elements)
+
+ def _get_byte_order(self):
+ return self._byte_order
+
+ def _set_byte_order(self, byte_order):
+ if byte_order not in ['<', '>', '=']:
+ raise ValueError("byte order must be '<', '>', or '='")
+
+ self._byte_order = byte_order
+
+ byte_order = property(_get_byte_order, _set_byte_order)
+
+ def _index(self):
+ self._element_lookup = dict((elt.name, elt) for elt in
+ self._elements)
+ if len(self._element_lookup) != len(self._elements):
+ raise ValueError("two elements with same name")
+
+ @staticmethod
+ def _parse_header(stream):
+ '''
+ Parse a PLY header from a readable file-like stream.
+ '''
+ lines = []
+ comments = {'comment': [], 'obj_info': []}
+ while True:
+ line = stream.readline().decode('ascii').strip()
+ fields = _split_line(line, 1)
+
+ if fields[0] == 'end_header':
+ break
+
+ elif fields[0] in comments.keys():
+ lines.append(fields)
+ else:
+ lines.append(line.split())
+
+ a = 0
+ if lines[a] != ['ply']:
+ raise PlyParseError("expected 'ply'")
+
+ a += 1
+ while lines[a][0] in comments.keys():
+ comments[lines[a][0]].append(lines[a][1])
+ a += 1
+
+ if lines[a][0] != 'format':
+ raise PlyParseError("expected 'format'")
+
+ if lines[a][2] != '1.0':
+ raise PlyParseError("expected version '1.0'")
+
+ if len(lines[a]) != 3:
+ raise PlyParseError("too many fields after 'format'")
+
+ fmt = lines[a][1]
+
+ if fmt not in _byte_order_map:
+ raise PlyParseError("don't understand format %r" % fmt)
+
+ byte_order = _byte_order_map[fmt]
+ text = fmt == 'ascii'
+
+ a += 1
+ while a < len(lines) and lines[a][0] in comments.keys():
+ comments[lines[a][0]].append(lines[a][1])
+ a += 1
+
+ return PlyData(PlyElement._parse_multi(lines[a:]),
+ text, byte_order,
+ comments['comment'], comments['obj_info'])
+
+ @staticmethod
+ def read(stream):
+ '''
+ Read PLY data from a readable file-like object or filename.
+ '''
+ (must_close, stream) = _open_stream(stream, 'read')
+ try:
+ data = PlyData._parse_header(stream)
+ for elt in data:
+ elt._read(stream, data.text, data.byte_order)
+ finally:
+ if must_close:
+ stream.close()
+
+ return data
+
+ def write(self, stream):
+ '''
+ Write PLY data to a writeable file-like object or filename.
+ '''
+ (must_close, stream) = _open_stream(stream, 'write')
+ try:
+ stream.write(self.header.encode('ascii'))
+ stream.write(b'\r\n')
+ for elt in self:
+ elt._write(stream, self.text, self.byte_order)
+ finally:
+ if must_close:
+ stream.close()
+
+ @property
+ def header(self):
+ '''
+ Provide PLY-formatted metadata for the instance.
+ '''
+ lines = ['ply']
+
+ if self.text:
+ lines.append('format ascii 1.0')
+ else:
+ lines.append('format ' +
+ _byte_order_reverse[self.byte_order] +
+ ' 1.0')
+
+ # Some information is lost here, since all comments are placed
+ # between the 'format' line and the first element.
+ for c in self.comments:
+ lines.append('comment ' + c)
+
+ for c in self.obj_info:
+ lines.append('obj_info ' + c)
+
+ lines.extend(elt.header for elt in self.elements)
+ lines.append('end_header')
+ return '\r\n'.join(lines)
+
+ def __iter__(self):
+ return iter(self.elements)
+
+ def __len__(self):
+ return len(self.elements)
+
+ def __contains__(self, name):
+ return name in self._element_lookup
+
+ def __getitem__(self, name):
+ return self._element_lookup[name]
+
+ def __str__(self):
+ return self.header
+
+ def __repr__(self):
+ return ('PlyData(%r, text=%r, byte_order=%r, '
+ 'comments=%r, obj_info=%r)' %
+ (self.elements, self.text, self.byte_order,
+ self.comments, self.obj_info))
+
+
+def _open_stream(stream, read_or_write):
+ if hasattr(stream, read_or_write):
+ return (False, stream)
+ try:
+ return (True, open(stream, read_or_write[0] + 'b'))
+ except TypeError:
+ raise RuntimeError("expected open file or filename")
+
+
+class PlyElement(object):
+
+ '''
+ PLY file element.
+ A client of this library doesn't normally need to instantiate this
+ directly, so the following is only for the sake of documenting the
+ internals.
+ Creating a PlyElement instance is generally done in one of two ways:
+ as a byproduct of PlyData.read (when reading a PLY file) and by
+ PlyElement.describe (before writing a PLY file).
+ '''
+
+ def __init__(self, name, properties, count, comments=[]):
+ '''
+ This is not part of the public interface. The preferred methods
+ of obtaining PlyElement instances are PlyData.read (to read from
+ a file) and PlyElement.describe (to construct from a numpy
+ array).
+ '''
+ self._name = str(name)
+ self._check_name()
+ self._count = count
+
+ self._properties = tuple(properties)
+ self._index()
+
+ self.comments = list(comments)
+
+ self._have_list = any(isinstance(p, PlyListProperty)
+ for p in self.properties)
+
+ @property
+ def count(self):
+ return self._count
+
+ def _get_data(self):
+ return self._data
+
+ def _set_data(self, data):
+ self._data = data
+ self._count = len(data)
+ self._check_sanity()
+
+ data = property(_get_data, _set_data)
+
+ def _check_sanity(self):
+ for prop in self.properties:
+ if prop.name not in self._data.dtype.fields:
+ raise ValueError("dangling property %r" % prop.name)
+
+ def _get_properties(self):
+ return self._properties
+
+ def _set_properties(self, properties):
+ self._properties = tuple(properties)
+ self._check_sanity()
+ self._index()
+
+ properties = property(_get_properties, _set_properties)
+
+ def _index(self):
+ self._property_lookup = dict((prop.name, prop)
+ for prop in self._properties)
+ if len(self._property_lookup) != len(self._properties):
+ raise ValueError("two properties with same name")
+
+ def ply_property(self, name):
+ return self._property_lookup[name]
+
+ @property
+ def name(self):
+ return self._name
+
+ def _check_name(self):
+ if any(c.isspace() for c in self._name):
+ msg = "element name %r contains spaces" % self._name
+ raise ValueError(msg)
+
+ def dtype(self, byte_order='='):
+ '''
+ Return the numpy dtype of the in-memory representation of the
+ data. (If there are no list properties, and the PLY format is
+ binary, then this also accurately describes the on-disk
+ representation of the element.)
+ '''
+ return [(prop.name, prop.dtype(byte_order))
+ for prop in self.properties]
+
+ @staticmethod
+ def _parse_multi(header_lines):
+ '''
+ Parse a list of PLY element definitions.
+ '''
+ elements = []
+ while header_lines:
+ (elt, header_lines) = PlyElement._parse_one(header_lines)
+ elements.append(elt)
+
+ return elements
+
+ @staticmethod
+ def _parse_one(lines):
+ '''
+ Consume one element definition. The unconsumed input is
+ returned along with a PlyElement instance.
+ '''
+ a = 0
+ line = lines[a]
+
+ if line[0] != 'element':
+ raise PlyParseError("expected 'element'")
+ if len(line) > 3:
+ raise PlyParseError("too many fields after 'element'")
+ if len(line) < 3:
+ raise PlyParseError("too few fields after 'element'")
+
+ (name, count) = (line[1], int(line[2]))
+
+ comments = []
+ properties = []
+ while True:
+ a += 1
+ if a >= len(lines):
+ break
+
+ if lines[a][0] == 'comment':
+ comments.append(lines[a][1])
+ elif lines[a][0] == 'property':
+ properties.append(PlyProperty._parse_one(lines[a]))
+ else:
+ break
+
+ return (PlyElement(name, properties, count, comments),
+ lines[a:])
+
+ @staticmethod
+ def describe(data, name, len_types={}, val_types={},
+ comments=[]):
+ '''
+ Construct a PlyElement from an array's metadata.
+ len_types and val_types can be given as mappings from list
+ property names to type strings (like 'u1', 'f4', etc., or
+ 'int8', 'float32', etc.). These can be used to define the length
+ and value types of list properties. List property lengths
+ always default to type 'u1' (8-bit unsigned integer), and value
+ types default to 'i4' (32-bit integer).
+ '''
+ if not isinstance(data, _np.ndarray):
+ raise TypeError("only numpy arrays are supported")
+
+ if len(data.shape) != 1:
+ raise ValueError("only one-dimensional arrays are "
+ "supported")
+
+ count = len(data)
+
+ properties = []
+ descr = data.dtype.descr
+
+ for t in descr:
+ if not isinstance(t[1], str):
+ raise ValueError("nested records not supported")
+
+ if not t[0]:
+ raise ValueError("field with empty name")
+
+ if len(t) != 2 or t[1][1] == 'O':
+ # non-scalar field, which corresponds to a list
+ # property in PLY.
+
+ if t[1][1] == 'O':
+ if len(t) != 2:
+ raise ValueError("non-scalar object fields not "
+ "supported")
+
+ len_str = _data_type_reverse[len_types.get(t[0], 'u1')]
+ if t[1][1] == 'O':
+ val_type = val_types.get(t[0], 'i4')
+ val_str = _lookup_type(val_type)
+ else:
+ val_str = _lookup_type(t[1][1:])
+
+ prop = PlyListProperty(t[0], len_str, val_str)
+ else:
+ val_str = _lookup_type(t[1][1:])
+ prop = PlyProperty(t[0], val_str)
+
+ properties.append(prop)
+
+ elt = PlyElement(name, properties, count, comments)
+ elt.data = data
+
+ return elt
+
+ def _read(self, stream, text, byte_order):
+ '''
+ Read the actual data from a PLY file.
+ '''
+ if text:
+ self._read_txt(stream)
+ else:
+ if self._have_list:
+ # There are list properties, so a simple load is
+ # impossible.
+ self._read_bin(stream, byte_order)
+ else:
+ # There are no list properties, so loading the data is
+ # much more straightforward.
+ self._data = _np.fromfile(stream,
+ self.dtype(byte_order),
+ self.count)
+
+ if len(self._data) < self.count:
+ k = len(self._data)
+ del self._data
+ raise PlyParseError("early end-of-file", self, k)
+
+ self._check_sanity()
+
+ def _write(self, stream, text, byte_order):
+ '''
+ Write the data to a PLY file.
+ '''
+ if text:
+ self._write_txt(stream)
+ else:
+ if self._have_list:
+ # There are list properties, so serialization is
+ # slightly complicated.
+ self._write_bin(stream, byte_order)
+ else:
+ # no list properties, so serialization is
+ # straightforward.
+ self.data.astype(self.dtype(byte_order),
+ copy=False).tofile(stream)
+
+ def _read_txt(self, stream):
+ '''
+ Load a PLY element from an ASCII-format PLY file. The element
+ may contain list properties.
+ '''
+ self._data = _np.empty(self.count, dtype=self.dtype())
+
+ k = 0
+ for line in _islice(iter(stream.readline, b''), self.count):
+ fields = iter(line.strip().split())
+ for prop in self.properties:
+ try:
+ self._data[prop.name][k] = prop._from_fields(fields)
+ except StopIteration:
+ raise PlyParseError("early end-of-line",
+ self, k, prop)
+ except ValueError:
+ raise PlyParseError("malformed input",
+ self, k, prop)
+ try:
+ next(fields)
+ except StopIteration:
+ pass
+ else:
+ raise PlyParseError("expected end-of-line", self, k)
+ k += 1
+
+ if k < self.count:
+ del self._data
+ raise PlyParseError("early end-of-file", self, k)
+
+ def _write_txt(self, stream):
+ '''
+ Save a PLY element to an ASCII-format PLY file. The element may
+ contain list properties.
+ '''
+ for rec in self.data:
+ fields = []
+ for prop in self.properties:
+ fields.extend(prop._to_fields(rec[prop.name]))
+
+ _np.savetxt(stream, [fields], '%.18g', newline='\r\n')
+
+ def _read_bin(self, stream, byte_order):
+ '''
+ Load a PLY element from a binary PLY file. The element may
+ contain list properties.
+ '''
+ self._data = _np.empty(self.count, dtype=self.dtype(byte_order))
+
+ for k in _range(self.count):
+ for prop in self.properties:
+ try:
+ self._data[prop.name][k] = \
+ prop._read_bin(stream, byte_order)
+ except StopIteration:
+ raise PlyParseError("early end-of-file",
+ self, k, prop)
+
+ def _write_bin(self, stream, byte_order):
+ '''
+ Save a PLY element to a binary PLY file. The element may
+ contain list properties.
+ '''
+ for rec in self.data:
+ for prop in self.properties:
+ prop._write_bin(rec[prop.name], stream, byte_order)
+
+ @property
+ def header(self):
+ '''
+ Format this element's metadata as it would appear in a PLY
+ header.
+ '''
+ lines = ['element %s %d' % (self.name, self.count)]
+
+ # Some information is lost here, since all comments are placed
+ # between the 'element' line and the first property definition.
+ for c in self.comments:
+ lines.append('comment ' + c)
+
+ lines.extend(list(map(str, self.properties)))
+
+ return '\r\n'.join(lines)
+
+ def __getitem__(self, key):
+ return self.data[key]
+
+ def __setitem__(self, key, value):
+ self.data[key] = value
+
+ def __str__(self):
+ return self.header
+
+ def __repr__(self):
+ return ('PlyElement(%r, %r, count=%d, comments=%r)' %
+ (self.name, self.properties, self.count,
+ self.comments))
+
+
+class PlyProperty(object):
+
+ '''
+ PLY property description. This class is pure metadata; the data
+ itself is contained in PlyElement instances.
+ '''
+
+ def __init__(self, name, val_dtype):
+ self._name = str(name)
+ self._check_name()
+ self.val_dtype = val_dtype
+
+ def _get_val_dtype(self):
+ return self._val_dtype
+
+ def _set_val_dtype(self, val_dtype):
+ self._val_dtype = _data_types[_lookup_type(val_dtype)]
+
+ val_dtype = property(_get_val_dtype, _set_val_dtype)
+
+ @property
+ def name(self):
+ return self._name
+
+ def _check_name(self):
+ if any(c.isspace() for c in self._name):
+ msg = "Error: property name %r contains spaces" % self._name
+ raise RuntimeError(msg)
+
+ @staticmethod
+ def _parse_one(line):
+ assert line[0] == 'property'
+
+ if line[1] == 'list':
+ if len(line) > 5:
+ raise PlyParseError("too many fields after "
+ "'property list'")
+ if len(line) < 5:
+ raise PlyParseError("too few fields after "
+ "'property list'")
+
+ return PlyListProperty(line[4], line[2], line[3])
+
+ else:
+ if len(line) > 3:
+ raise PlyParseError("too many fields after "
+ "'property'")
+ if len(line) < 3:
+ raise PlyParseError("too few fields after "
+ "'property'")
+
+ return PlyProperty(line[2], line[1])
+
+ def dtype(self, byte_order='='):
+ '''
+ Return the numpy dtype description for this property (as a tuple
+ of strings).
+ '''
+ return byte_order + self.val_dtype
+
+ def _from_fields(self, fields):
+ '''
+ Parse from generator. Raise StopIteration if the property could
+ not be read.
+ '''
+ return _np.dtype(self.dtype()).type(next(fields))
+
+ def _to_fields(self, data):
+ '''
+ Return generator over one item.
+ '''
+ yield _np.dtype(self.dtype()).type(data)
+
+ def _read_bin(self, stream, byte_order):
+ '''
+ Read data from a binary stream. Raise StopIteration if the
+ property could not be read.
+ '''
+ try:
+ return _np.fromfile(stream, self.dtype(byte_order), 1)[0]
+ except IndexError:
+ raise StopIteration
+
+ def _write_bin(self, data, stream, byte_order):
+ '''
+ Write data to a binary stream.
+ '''
+ _np.dtype(self.dtype(byte_order)).type(data).tofile(stream)
+
+ def __str__(self):
+ val_str = _data_type_reverse[self.val_dtype]
+ return 'property %s %s' % (val_str, self.name)
+
+ def __repr__(self):
+ return 'PlyProperty(%r, %r)' % (self.name,
+ _lookup_type(self.val_dtype))
+
+
+class PlyListProperty(PlyProperty):
+
+ '''
+ PLY list property description.
+ '''
+
+ def __init__(self, name, len_dtype, val_dtype):
+ PlyProperty.__init__(self, name, val_dtype)
+
+ self.len_dtype = len_dtype
+
+ def _get_len_dtype(self):
+ return self._len_dtype
+
+ def _set_len_dtype(self, len_dtype):
+ self._len_dtype = _data_types[_lookup_type(len_dtype)]
+
+ len_dtype = property(_get_len_dtype, _set_len_dtype)
+
+ def dtype(self, byte_order='='):
+ '''
+ List properties always have a numpy dtype of "object".
+ '''
+ return '|O'
+
+ def list_dtype(self, byte_order='='):
+ '''
+ Return the pair (len_dtype, val_dtype) (both numpy-friendly
+ strings).
+ '''
+ return (byte_order + self.len_dtype,
+ byte_order + self.val_dtype)
+
+ def _from_fields(self, fields):
+ (len_t, val_t) = self.list_dtype()
+
+ n = int(_np.dtype(len_t).type(next(fields)))
+
+ data = _np.loadtxt(list(_islice(fields, n)), val_t, ndmin=1)
+ if len(data) < n:
+ raise StopIteration
+
+ return data
+
+ def _to_fields(self, data):
+ '''
+ Return generator over the (numerical) PLY representation of the
+ list data (length followed by actual data).
+ '''
+ (len_t, val_t) = self.list_dtype()
+
+ data = _np.asarray(data, dtype=val_t).ravel()
+
+ yield _np.dtype(len_t).type(data.size)
+ for x in data:
+ yield x
+
+ def _read_bin(self, stream, byte_order):
+ (len_t, val_t) = self.list_dtype(byte_order)
+
+ try:
+ n = _np.fromfile(stream, len_t, 1)[0]
+ except IndexError:
+ raise StopIteration
+
+ data = _np.fromfile(stream, val_t, n)
+ if len(data) < n:
+ raise StopIteration
+
+ return data
+
+ def _write_bin(self, data, stream, byte_order):
+ '''
+ Write data to a binary stream.
+ '''
+ (len_t, val_t) = self.list_dtype(byte_order)
+
+ data = _np.asarray(data, dtype=val_t).ravel()
+
+ _np.array(data.size, dtype=len_t).tofile(stream)
+ data.tofile(stream)
+
+ def __str__(self):
+ len_str = _data_type_reverse[self.len_dtype]
+ val_str = _data_type_reverse[self.val_dtype]
+ return 'property list %s %s %s' % (len_str, val_str, self.name)
+
+ def __repr__(self):
+ return ('PlyListProperty(%r, %r, %r)' %
+ (self.name,
+ _lookup_type(self.len_dtype),
+ _lookup_type(self.val_dtype)))
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/render_balls_so.cpp b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/render_balls_so.cpp
new file mode 100644
index 0000000000..444c99a96b
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/Pointnet_Pointnet2_pytorch/visualizer/render_balls_so.cpp
@@ -0,0 +1,58 @@
+#include
+#include
+#include
+#include
+using namespace std;
+
+struct PointInfo{
+ int x,y,z;
+ float r,g,b;
+};
+
+extern "C"{
+
+void render_ball(int h,int w,unsigned char * show,int n,int * xyzs,float * c0,float * c1,float * c2,int r){
+ r=max(r,1);
+ vector depth(h*w,-2100000000);
+ vector pattern;
+ for (int dx=-r;dx<=r;dx++)
+ for (int dy=-r;dy<=r;dy++)
+ if (dx*dx+dy*dy=h || y2<0 || y2>=w) && depth[x2*w+y2] 0:
+ show[:, :, 0] = np.maximum(show[:, :, 0], np.roll(show[:, :, 0], 1, axis=0))
+ if magnifyBlue >= 2:
+ show[:, :, 0] = np.maximum(show[:, :, 0], np.roll(show[:, :, 0], -1, axis=0))
+ show[:, :, 0] = np.maximum(show[:, :, 0], np.roll(show[:, :, 0], 1, axis=1))
+ if magnifyBlue >= 2:
+ show[:, :, 0] = np.maximum(show[:, :, 0], np.roll(show[:, :, 0], -1, axis=1))
+ if showrot:
+ cv2.putText(show, 'xangle %d' % (int(xangle / np.pi * 180)), (30, showsz - 30), 0, 0.5,
+ cv2.cv.CV_RGB(255, 0, 0))
+ cv2.putText(show, 'yangle %d' % (int(yangle / np.pi * 180)), (30, showsz - 50), 0, 0.5,
+ cv2.cv.CV_RGB(255, 0, 0))
+ cv2.putText(show, 'zoom %d%%' % (int(zoom * 100)), (30, showsz - 70), 0, 0.5, cv2.cv.CV_RGB(255, 0, 0))
+
+ changed = True
+ while True:
+ if changed:
+ render()
+ changed = False
+ cv2.imshow('show3d', show)
+ if waittime == 0:
+ cmd = cv2.waitKey(10) % 256
+ else:
+ cmd = cv2.waitKey(waittime) % 256
+ if cmd == ord('q'):
+ break
+ elif cmd == ord('Q'):
+ sys.exit(0)
+
+ if cmd == ord('t') or cmd == ord('p'):
+ if cmd == ord('t'):
+ if c_gt is None:
+ c0 = np.zeros((len(xyz),), dtype='float32') + 255
+ c1 = np.zeros((len(xyz),), dtype='float32') + 255
+ c2 = np.zeros((len(xyz),), dtype='float32') + 255
+ else:
+ c0 = c_gt[:, 0]
+ c1 = c_gt[:, 1]
+ c2 = c_gt[:, 2]
+ else:
+ if c_pred is None:
+ c0 = np.zeros((len(xyz),), dtype='float32') + 255
+ c1 = np.zeros((len(xyz),), dtype='float32') + 255
+ c2 = np.zeros((len(xyz),), dtype='float32') + 255
+ else:
+ c0 = c_pred[:, 0]
+ c1 = c_pred[:, 1]
+ c2 = c_pred[:, 2]
+ if normalizecolor:
+ c0 /= (c0.max() + 1e-14) / 255.0
+ c1 /= (c1.max() + 1e-14) / 255.0
+ c2 /= (c2.max() + 1e-14) / 255.0
+ c0 = np.require(c0, 'float32', 'C')
+ c1 = np.require(c1, 'float32', 'C')
+ c2 = np.require(c2, 'float32', 'C')
+ changed = True
+
+ if cmd == ord('n'):
+ zoom *= 1.1
+ changed = True
+ elif cmd == ord('m'):
+ zoom /= 1.1
+ changed = True
+ elif cmd == ord('r'):
+ zoom = 1.0
+ changed = True
+ elif cmd == ord('s'):
+ cv2.imwrite('show3d.png', show)
+ if waittime != 0:
+ break
+ return cmd
+
+
+if __name__ == '__main__':
+ import os
+ import numpy as np
+ import argparse
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--dataset', type=str, default='../data/shapenet', help='dataset path')
+ parser.add_argument('--category', type=str, default='Airplane', help='select category')
+ parser.add_argument('--npoints', type=int, default=2500, help='resample points number')
+ parser.add_argument('--ballradius', type=int, default=10, help='ballradius')
+ opt = parser.parse_args()
+ '''
+ Airplane 02691156
+ Bag 02773838
+ Cap 02954340
+ Car 02958343
+ Chair 03001627
+ Earphone 03261776
+ Guitar 03467517
+ Knife 03624134
+ Lamp 03636649
+ Laptop 03642806
+ Motorbike 03790512
+ Mug 03797390
+ Pistol 03948459
+ Rocket 04099429
+ Skateboard 04225987
+ Table 04379243'''
+
+ cmap = np.array([[1.00000000e+00, 0.00000000e+00, 0.00000000e+00],
+ [3.12493437e-02, 1.00000000e+00, 1.31250131e-06],
+ [0.00000000e+00, 6.25019688e-02, 1.00000000e+00],
+ [1.00000000e+00, 0.00000000e+00, 9.37500000e-02],
+ [1.00000000e+00, 0.00000000e+00, 9.37500000e-02],
+ [1.00000000e+00, 0.00000000e+00, 9.37500000e-02],
+ [1.00000000e+00, 0.00000000e+00, 9.37500000e-02],
+ [1.00000000e+00, 0.00000000e+00, 9.37500000e-02],
+ [1.00000000e+00, 0.00000000e+00, 9.37500000e-02],
+ [1.00000000e+00, 0.00000000e+00, 9.37500000e-02]])
+ BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+ ROOT_DIR = os.path.dirname(BASE_DIR)
+ sys.path.append(BASE_DIR)
+ sys.path.append(os.path.join(ROOT_DIR, 'data_utils'))
+
+ from ShapeNetDataLoader import PartNormalDataset
+ root = '../data/shapenetcore_partanno_segmentation_benchmark_v0_normal/'
+ dataset = PartNormalDataset(root = root, npoints=2048, split='test', normal_channel=False)
+ idx = np.random.randint(0, len(dataset))
+ data = dataset[idx]
+ point_set, _, seg = data
+ choice = np.random.choice(point_set.shape[0], opt.npoints, replace=True)
+ point_set, seg = point_set[choice, :], seg[choice]
+ seg = seg - seg.min()
+ gt = cmap[seg, :]
+ pred = cmap[seg, :]
+ showpoints(point_set, gt, c_pred=pred, waittime=0, showrot=False, magnifyBlue=0, freezerot=False,
+ background=(255, 255, 255), normalizecolor=True, ballradius=opt.ballradius)
+
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/README.md b/dimos/models/manipulation/contact_graspnet_pytorch/README.md
new file mode 100644
index 0000000000..780764a79e
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/README.md
@@ -0,0 +1,136 @@
+# Contact-GraspNet Pytorch
+
+This is a pytorch implementation of Contact-GraspNet. The original tensorflow
+implementation can be found at [https://github.com/NVlabs/contact_graspnet](https://github.com/NVlabs/contact_graspnet).
+
+### Disclaimer
+This is not an official implementation of Contact-GraspNet. The results shown here have been evaluated
+empirically and may not match the results in the original paper. This code is provided as-is and is not
+guaranteed to work. Please use at your own risk.
+
+Additionally, this code implements the core features of Contact-GraspNet as presented
+by the authors. It does not implement all possible configuration as implemented in the original
+tensorflow implementation. If you implement additional features, please consider submitting a pull request.
+
+
+### Contact-GraspNet: Efficient 6-DoF Grasp Generation in Cluttered Scenes
+Martin Sundermeyer, Arsalan Mousavian, Rudolph Triebel, Dieter Fox
+ICRA 2021
+
+[paper](https://arxiv.org/abs/2103.14127), [project page](https://research.nvidia.com/publication/2021-03_Contact-GraspNet%3A--Efficient), [video](http://www.youtube.com/watch?v=qRLKYSLXElM)
+
+
+
+
+
+## Installation
+This code has been tested with python 3.9.
+
+Create the conda env.
+```
+conda env create -f contact_graspnet_env.yml
+```
+
+Install as a package.
+```
+pip3 install -e .
+```
+
+### Troubleshooting
+
+N/A
+
+
+### Hardware
+Training:
+ Tested with 1x Nvidia GPU >= 24GB VRAM. Reduce batch size if you have less VRAM.
+
+Inference: 1x Nvidia GPU >= 8GB VRAM (might work with less).
+
+
+## Inference
+Model weights are included in the `checkpoints` directory. Test data can be found in the `test_data` directory.
+
+Contact-GraspNet can directly predict a 6-DoF grasp distribution from a raw scene point cloud. However, to obtain object-wise grasps, remove background grasps and to achieve denser proposals it is highly recommended to use (unknown) object segmentation. We used [FastSAM](https://github.com/CASIA-IVA-Lab/FastSAM) for unknown object segmentation (in contrast to the original tensorflow implementation which uses [UIOS](https://github.com/chrisdxie/uois)). Note: Infrastructure for segmentation is not included in this repository.
+
+Given a .npy/.npz file with a depth map (in meters), camera matrix K and (optionally) a 2D segmentation map, execute:
+
+```shell
+python contact_graspnet_pytorch/inference.py \
+ --np_path="test_data/*.npy" \
+ --local_regions --filter_grasps
+```
+
+
+
+
+Note: This image is from the original Contact-GraspNet repo. Results may vary.
+--> close the window to go to next scene
+
+Given a .npy/.npz file with just a 3D point cloud (in meters), execute [for example](examples/realsense_crop_sigma_001.png):
+```shell
+python contact_graspnet/inference.py --np_path=/path/to/your/pc.npy \
+ --forward_passes=5 \
+ --z_range=[0.2,1.1]
+```
+
+`--np_path`: input .npz/.npy file(s) with 'depth', 'K' and optionally 'segmap', 'rgb' keys. For processing a Nx3 point cloud instead use 'xzy' and optionally 'xyz_color' as keys.
+`--ckpt_dir`: relative path to checkpooint directory. By default `checkpoint/scene_test_2048_bs3_hor_sigma_001` is used. For very clean / noisy depth data consider `scene_2048_bs3_rad2_32` / `scene_test_2048_bs3_hor_sigma_0025` trained with no / strong noise.
+`--local_regions`: Crop 3D local regions around object segments for inference. (only works with segmap)
+`--filter_grasps`: Filter grasp contacts such that they only lie on the surface of object segments. (only works with segmap)
+`--skip_border_objects` Ignore segments touching the depth map boundary.
+`--forward_passes` number of (batched) forward passes. Increase to sample more potential grasp contacts.
+`--z_range` [min, max] z values in meter used to crop the input point cloud, e.g. to avoid grasps in the foreground/background(as above).
+`--arg_configs TEST.second_thres:0.19 TEST.first_thres:0.23` Overwrite config confidence thresholds for successful grasp contacts to get more/less grasp proposals
+
+
+## Training
+
+### Set up Acronym Dataset
+
+Follow the instructions at [docs/acronym_setup.md](docs/acronym_setup.md) to set up the Acronym dataset.
+
+### Set Environment Variables
+When training on a headless server set the environment variable
+```shell
+export PYOPENGL_PLATFORM='egl'
+```
+This is also done automatically in the training script.
+
+### Quickstart Training
+
+Start training with config `contact_graspnet_pytorch/config.yaml`
+```
+python3 contact_graspnet_pytorch/train.py --data_path acronym/
+```
+
+### Additional Training Options
+
+To set a custom model name and custom data path:
+
+```
+python contact_graspnet/train.py --ckpt_dir checkpoints/your_model_name \
+ --data_path /path/to/acronym/data
+```
+
+To restart a previous batch
+```
+python contact_graspnet/train.py --ckpt_dir checkpoints/previous_model_name \
+ --data_path /path/to/acronym/data
+```
+
+### Generate Contact Grasps and Scenes yourself (optional)
+
+See [docs/generate_scenes.md](docs/generate_scenes.md) for instructions on how to generate scenes and grasps yourself.
+
+## Citation
+If you find this work useful, please consider citing the author's original work and starring this repo.
+
+```
+@article{sundermeyer2021contact,
+ title={Contact-GraspNet: Efficient 6-DoF Grasp Generation in Cluttered Scenes},
+ author={Sundermeyer, Martin and Mousavian, Arsalan and Triebel, Rudolph and Fox, Dieter},
+ booktitle={2021 IEEE International Conference on Robotics and Automation (ICRA)},
+ year={2021}
+}
+```
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/__init__.py b/dimos/models/manipulation/contact_graspnet_pytorch/__init__.py
new file mode 100644
index 0000000000..ceca363d37
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/__init__.py
@@ -0,0 +1,30 @@
+"""
+ContactGraspNet - A PyTorch implementation of the Contact-GraspNet model.
+This package can be imported as `dimos.models.manipulation.contact_graspnet_pytorch`.
+"""
+
+import os
+import sys
+# Add necessary directories to Python path
+package_dir = os.path.dirname(os.path.abspath(__file__))
+contact_pytorch_dir = os.path.join(package_dir, 'contact_graspnet_pytorch')
+if contact_pytorch_dir not in sys.path:
+ sys.path.insert(0, contact_pytorch_dir)
+if package_dir not in sys.path:
+ sys.path.insert(0, package_dir)
+
+# Re-export the core classes for convenient access
+from contact_graspnet_pytorch.contact_graspnet import ContactGraspnet
+from contact_graspnet_pytorch.contact_grasp_estimator import GraspEstimator
+from contact_graspnet_pytorch.inference import inference
+from contact_graspnet_pytorch.mesh_utils import PandaGripper, Object, create_gripper
+
+# This makes it possible to access like dimos.models.manipulation.contact_graspnet_pytorch.GraspEstimator
+__all__ = [
+ 'GraspEstimator',
+ 'inference',
+ 'ContactGraspnet',
+ 'PandaGripper',
+ 'Object',
+ 'create_gripper'
+]
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/checkpoints/contact_graspnet/checkpoints/model.pt b/dimos/models/manipulation/contact_graspnet_pytorch/checkpoints/contact_graspnet/checkpoints/model.pt
new file mode 100644
index 0000000000..b52cf0c889
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/checkpoints/contact_graspnet/checkpoints/model.pt differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/checkpoints/contact_graspnet/config.yaml b/dimos/models/manipulation/contact_graspnet_pytorch/checkpoints/contact_graspnet/config.yaml
new file mode 100644
index 0000000000..1855c3abf5
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/checkpoints/contact_graspnet/config.yaml
@@ -0,0 +1,196 @@
+DATA:
+ classes: null
+ data_path: /home/user/Documents/grasp_generation/contact_graspnet_pytorch/acronym/
+ depth_augm:
+ clip: 0.005
+ gaussian_kernel: 0
+ sigma: 0.001
+ gripper_width: 0.08
+ input_normals: false
+ intrinsics: realsense
+ labels:
+ bin_weights:
+ - 0.16652107
+ - 0.21488856
+ - 0.37031708
+ - 0.55618503
+ - 0.75124664
+ - 0.93943357
+ - 1.07824539
+ - 1.19423112
+ - 1.55731375
+ - 3.17161779
+ filter_z: true
+ k: 1
+ max_radius: 0.005
+ num_neg_contacts: 0
+ num_pos_contacts: 8000
+ offset_bins:
+ - 0
+ - 0.00794435329
+ - 0.0158887021
+ - 0.0238330509
+ - 0.0317773996
+ - 0.0397217484
+ - 0.0476660972
+ - 0.055610446
+ - 0.0635547948
+ - 0.0714991435
+ - 0.08
+ to_gpu: false
+ z_val: -0.1
+ ndataset_points: 20000
+ num_point: 2048
+ num_test_scenes: 1000
+ pc_augm:
+ clip: 0.005
+ occlusion_dropout_rate: 0.0
+ occlusion_nclusters: 0
+ sigma: 0.0
+ raw_num_points: 20000
+ scene_contacts_path: scene_contacts
+ train_and_test: false
+ train_on_scenes: true
+ use_farthest_point: false
+ use_uniform_quaternions: false
+ view_sphere:
+ distance_range:
+ - 0.9
+ - 1.3
+ elevation:
+ - 30
+ - 150
+LOSS:
+ min_geom_loss_divisor: 1.0
+ offset_loss_type: sigmoid_cross_entropy
+ too_small_offset_pred_bin_factor: 0
+ topk_confidence: 512
+MODEL:
+ asymmetric_model: true
+ bin_offsets: true
+ dir_vec_length_offset: false
+ grasp_conf_head:
+ conv1d: 1
+ dropout_keep: 0.5
+ grasp_dir_head:
+ conv1d: 3
+ dropout_keep: 0.7
+ joint_head:
+ conv1d: 4
+ dropout_keep: 0.7
+ joint_heads: false
+ model: contact_graspnet
+ pointnet_fp_modules:
+ - mlp:
+ - 256
+ - 256
+ - mlp:
+ - 256
+ - 128
+ - mlp:
+ - 128
+ - 128
+ - 128
+ pointnet_sa_module:
+ group_all: true
+ mlp:
+ - 256
+ - 512
+ - 1024
+ pointnet_sa_modules_msg:
+ - mlp_list:
+ - - 32
+ - 32
+ - 64
+ - - 64
+ - 64
+ - 128
+ - - 64
+ - 96
+ - 128
+ npoint: 2048
+ nsample_list:
+ - 32
+ - 64
+ - 128
+ radius_list:
+ - 0.02
+ - 0.04
+ - 0.08
+ - mlp_list:
+ - - 64
+ - 64
+ - 128
+ - - 128
+ - 128
+ - 256
+ - - 128
+ - 128
+ - 256
+ npoint: 512
+ nsample_list:
+ - 64
+ - 64
+ - 128
+ radius_list:
+ - 0.04
+ - 0.08
+ - 0.16
+ - mlp_list:
+ - - 64
+ - 64
+ - 128
+ - - 128
+ - 128
+ - 256
+ - - 128
+ - 128
+ - 256
+ npoint: 128
+ nsample_list:
+ - 64
+ - 64
+ - 128
+ radius_list:
+ - 0.08
+ - 0.16
+ - 0.32
+ pred_contact_approach: false
+ pred_contact_base: false
+ pred_contact_offset: true
+ pred_contact_success: true
+ pred_grasps_adds: true
+ pred_grasps_adds_gt2pred: false
+OPTIMIZER:
+ adds_gt2pred_loss_weight: 1
+ adds_loss_weight: 10
+ approach_cosine_loss_weight: 1
+ backup_every: 200
+ batch_size: 6
+ bn_decay_clip: 0.99
+ bn_decay_decay_rate: 0.5
+ bn_decay_decay_step: 200000
+ bn_init_decay: 0.5
+ checkpoint_every: 100
+ decay_rate: 0.7
+ decay_step: 200000
+ dir_cosine_loss_weight: 1
+ learning_rate: 0.001
+ max_epoch: 200
+ momentum: 0.9
+ offset_loss_weight: 1
+ optimizer: adam
+ print_every: 10
+ score_ce_loss_weight: 1
+ val_every: 1
+TEST:
+ allow_zero_margin: 0
+ bin_vals: max
+ center_to_tip: 0.0
+ extra_opening: 0.005
+ filter_thres: 0.0001
+ first_thres: 0.15
+ max_farthest_points: 150
+ num_samples: 200
+ second_thres: 0.15
+ with_replacement: false
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/checkpoints/contact_graspnet/contact_graspnet.py b/dimos/models/manipulation/contact_graspnet_pytorch/checkpoints/contact_graspnet/contact_graspnet.py
new file mode 100644
index 0000000000..3f1668d68b
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/checkpoints/contact_graspnet/contact_graspnet.py
@@ -0,0 +1,750 @@
+import os
+import sys
+import numpy as np
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+
+# Import pointnet library
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+ROOT_DIR = os.path.dirname(os.path.dirname(BASE_DIR))
+
+sys.path.append(os.path.join(BASE_DIR))
+sys.path.append(os.path.join(BASE_DIR, 'Pointnet_Pointnet2_pytorch'))
+# sys.path.append(os.path.join(BASE_DIR, 'pointnet2', 'utils'))
+
+# from tf_sampling import farthest_point_sample, gather_point
+# from tf_grouping import query_ball_point, group_point, knn_point
+
+from models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetSetAbstraction, PointNetFeaturePropagation
+from contact_graspnet_pytorch import mesh_utils, utils
+
+class ContactGraspnet(nn.Module):
+ def __init__(self, global_config, device):
+ super(ContactGraspnet, self).__init__()
+
+ self.device = device
+ # self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
+
+ # -- Extract config -- #
+ self.global_config = global_config
+ self.model_config = global_config['MODEL']
+ self.data_config = global_config['DATA']
+
+ radius_list_0 = self.model_config['pointnet_sa_modules_msg'][0]['radius_list']
+ radius_list_1 = self.model_config['pointnet_sa_modules_msg'][1]['radius_list']
+ radius_list_2 = self.model_config['pointnet_sa_modules_msg'][2]['radius_list']
+
+ nsample_list_0 = self.model_config['pointnet_sa_modules_msg'][0]['nsample_list']
+ nsample_list_1 = self.model_config['pointnet_sa_modules_msg'][1]['nsample_list']
+ nsample_list_2 = self.model_config['pointnet_sa_modules_msg'][2]['nsample_list']
+
+ mlp_list_0 = self.model_config['pointnet_sa_modules_msg'][0]['mlp_list'] # list of lists
+ mlp_list_1 = self.model_config['pointnet_sa_modules_msg'][1]['mlp_list']
+ mlp_list_2 = self.model_config['pointnet_sa_modules_msg'][2]['mlp_list']
+
+ npoint_0 = self.model_config['pointnet_sa_modules_msg'][0]['npoint']
+ npoint_1 = self.model_config['pointnet_sa_modules_msg'][1]['npoint']
+ npoint_2 = self.model_config['pointnet_sa_modules_msg'][2]['npoint']
+
+ fp_mlp_0 = self.model_config['pointnet_fp_modules'][0]['mlp']
+ fp_mlp_1 = self.model_config['pointnet_fp_modules'][1]['mlp']
+ fp_mlp_2 = self.model_config['pointnet_fp_modules'][2]['mlp']
+
+ sa_mlp = self.model_config['pointnet_sa_module']['mlp']
+ sa_group_all = self.model_config['pointnet_sa_module']['group_all']
+
+ self.input_normals = self.data_config['input_normals']
+ self.offset_bins = self.data_config['labels']['offset_bins']
+ self.joint_heads = self.model_config['joint_heads']
+
+ # For adding additional features
+ additional_channel = 3 if self.input_normals else 0
+
+ if 'asymmetric_model' in self.model_config and self.model_config['asymmetric_model']:
+ # It looks like there are xyz_points and "points". The points are normals I think?
+ self.sa1 = PointNetSetAbstractionMsg(npoint=npoint_0,
+ radius_list=radius_list_0,
+ nsample_list=nsample_list_0,
+ in_channel = 3 + additional_channel,
+ mlp_list=mlp_list_0)
+
+ # Sum the size of last layer of each mlp in mlp_list
+ sa1_out_channels = sum([mlp_list_0[i][-1] for i in range(len(mlp_list_0))])
+ self.sa2 = PointNetSetAbstractionMsg(npoint=npoint_1,
+ radius_list=radius_list_1,
+ nsample_list=nsample_list_1,
+ in_channel=sa1_out_channels,
+ mlp_list=mlp_list_1)
+
+ sa2_out_channels = sum([mlp_list_1[i][-1] for i in range(len(mlp_list_1))])
+ self.sa3 = PointNetSetAbstractionMsg(npoint=npoint_2,
+ radius_list=radius_list_2,
+ nsample_list=nsample_list_2,
+ in_channel=sa2_out_channels,
+ mlp_list=mlp_list_2)
+
+ sa3_out_channels = sum([mlp_list_2[i][-1] for i in range(len(mlp_list_2))])
+ self.sa4 = PointNetSetAbstraction(npoint=None,
+ radius=None,
+ nsample=None,
+ in_channel=3 + sa3_out_channels,
+ mlp=sa_mlp,
+ group_all=sa_group_all)
+
+ self.fp3 = PointNetFeaturePropagation(in_channel=sa_mlp[-1] + sa3_out_channels,
+ mlp=fp_mlp_2)
+
+ self.fp2 = PointNetFeaturePropagation(in_channel=fp_mlp_2[-1] + sa2_out_channels,
+ mlp=fp_mlp_1)
+
+ self.fp1 = PointNetFeaturePropagation(in_channel=fp_mlp_1[-1] + sa1_out_channels,
+ mlp=fp_mlp_0)
+ else:
+ raise NotImplementedError
+
+ if self.joint_heads:
+ raise NotImplementedError
+ else:
+ # Theres prob some bugs here
+
+ # Head for grasp direction
+ self.grasp_dir_head = nn.Sequential(
+ nn.Conv1d(fp_mlp_0[-1], 128, 1, padding=0),
+ nn.BatchNorm1d(128),
+ nn.ReLU(),
+ nn.Dropout(0.3), # p = 1 - keep_prob (tf is inverse of torch)
+ nn.Conv1d(128, 3, 1, padding=0)
+ )
+
+ # Remember to normalize the output of this head
+
+ # Head for grasp approach
+ self.grasp_approach_head = nn.Sequential(
+ nn.Conv1d(fp_mlp_0[-1], 128, 1, padding=0),
+ nn.BatchNorm1d(128),
+ nn.ReLU(),
+ nn.Dropout(0.3),
+ nn.Conv1d(128, 3, 1, padding=0)
+ )
+
+ # Head for grasp width
+ if self.model_config['dir_vec_length_offset']:
+ raise NotImplementedError
+ elif self.model_config['bin_offsets']:
+ self.grasp_offset_head = nn.Sequential(
+ nn.Conv1d(fp_mlp_0[-1], 128, 1, padding=0),
+ nn.BatchNorm1d(128),
+ nn.ReLU(),
+ nn.Conv1d(128, len(self.offset_bins) - 1, 1, padding=0)
+ )
+ else:
+ raise NotImplementedError
+
+ # Head for contact points
+ self.binary_seg_head = nn.Sequential(
+ nn.Conv1d(fp_mlp_0[-1], 128, 1, padding=0),
+ nn.BatchNorm1d(128),
+ nn.ReLU(),
+ nn.Dropout(0.5), # 0.5 in original code
+ nn.Conv1d(128, 1, 1, padding=0)
+ )
+
+ def forward(self, point_cloud):
+
+ # expensive, rather use random only
+ if 'raw_num_points' in self.data_config and self.data_config['raw_num_points'] != self.data_config['ndataset_points']:
+ raise NotImplementedError
+ point_cloud = gather_point(point_cloud, farthest_point_sample(data_config['ndataset_points'], point_cloud))
+
+ # Convert from tf to torch ordering
+ point_cloud = torch.transpose(point_cloud, 1, 2) # Now we have batch x channels (3 or 6) x num_points
+
+
+ l0_xyz = point_cloud[:, :3, :]
+ l0_points = point_cloud[:, 3:6, :] if self.input_normals else l0_xyz.clone()
+
+ # -- PointNet Backbone -- #
+ # Set Abstraction Layers
+ l1_xyz, l1_points = self.sa1(l0_xyz, l0_points)
+ l2_xyz, l2_points = self.sa2(l1_xyz, l1_points)
+ l3_xyz, l3_points = self.sa3(l2_xyz, l2_points)
+ l4_xyz, l4_points = self.sa4(l3_xyz, l3_points)
+
+ # Feature Propagation Layers
+ l3_points = self.fp3(l3_xyz, l4_xyz, l3_points, l4_points)
+ l2_points = self.fp2(l2_xyz, l3_xyz, l2_points, l3_points)
+ l1_points = self.fp1(l1_xyz, l2_xyz, l1_points, l2_points)
+
+ l0_points = l1_points
+ pred_points = l1_xyz
+
+ # -- Heads -- #
+ # Grasp Direction Head
+ grasp_dir_head = self.grasp_dir_head(l0_points)
+ grasp_dir_head_normed = F.normalize(grasp_dir_head, p=2, dim=1) # normalize along channels
+
+ # Grasp Approach Head
+ approach_dir_head = self.grasp_approach_head(l0_points)
+
+ # computer gram schmidt orthonormalization
+ dot_product = torch.sum(grasp_dir_head_normed * approach_dir_head, dim=1, keepdim=True)
+ projection = dot_product * grasp_dir_head_normed
+ approach_dir_head_orthog = F.normalize(approach_dir_head - projection, p=2, dim=1)
+
+ # Grasp Width Head
+ if self.model_config['dir_vec_length_offset']:
+ raise NotImplementedError
+ grasp_offset_head = F.normalize(grasp_dir_head, p=2, dim=1)
+ elif self.model_config['bin_offsets']:
+ grasp_offset_head = self.grasp_offset_head(l0_points)
+
+ # Binary Segmentation Head
+ binary_seg_head = self.binary_seg_head(l0_points)
+
+ # -- Create end_points -- #
+ # I don't think this is needed anymore
+ # end_points = {}
+ # end_points['grasp_dir_head'] = grasp_dir_head
+ # end_points['binary_seg_head'] = binary_seg_head
+ # end_points['binary_seg_pred'] = torch.sigmoid(binary_seg_head)
+ # end_points['grasp_offset_head'] = grasp_offset_head
+ # if self.model_config['bin_offsets']:
+ # end_points['grasp_offset_pred'] = torch.sigmoid(grasp_offset_head)
+ # else:
+ # end_points['grasp_offset_pred'] = grasp_offset_head
+ # end_points['approach_dir_head'] = approach_dir_head_orthog
+ # end_points['pred_points'] = pred_points
+
+ # # Get back to tf ordering
+ # for k, points in end_points.items():
+ # end_points[k] = points.permute(0, 2, 1)
+
+ # Expected outputs:
+ # pred_grasps_cam, pred_scores, pred_points, offset_pred = self.model(pc_batch)
+
+ # -- Construct Output -- #
+ # Get 6 DoF grasp pose
+ torch_bin_vals = self.get_bin_vals()
+
+ # PyTorch equivalent of tf.gather_nd with conditional
+ # I think the output should be B x N
+ if self.model_config['bin_offsets']:
+ argmax_indices = torch.argmax(grasp_offset_head, dim=1, keepdim=True)
+ offset_bin_pred_vals = torch_bin_vals[argmax_indices] # kinda sketch but works?
+ # expand_dims_indices = argmax_indices.unsqueeze(1)
+ # offset_bin_pred_vals = torch.gather(torch_bin_vals, 1, argmax_indices)
+ else:
+ offset_bin_pred_vals = grasp_offset_head[:, 0, :]
+
+ # TODO Start here
+ pred_grasps_cam = self.build_6d_grasp(approach_dir_head_orthog.permute(0, 2, 1),
+ grasp_dir_head.permute(0, 2, 1),
+ pred_points.permute(0, 2, 1),
+ offset_bin_pred_vals.permute(0, 2, 1),
+ use_torch=True) # B x N x 4 x 4
+
+ # Get pred scores
+ pred_scores = torch.sigmoid(binary_seg_head).permute(0, 2, 1)
+
+ # Get pred points
+ pred_points = pred_points.permute(0, 2, 1)
+
+ # Get pred offsets
+ offset_pred = offset_bin_pred_vals
+
+ # -- Values to compute loss on -- #
+ # intermediates = {}
+ # intermediates['grasp_offset_head'] = grasp_offset_head
+
+ pred = dict(
+ pred_grasps_cam=pred_grasps_cam,
+ pred_scores=pred_scores,
+ pred_points=pred_points,
+ offset_pred=offset_pred,
+ grasp_offset_head=grasp_offset_head # For loss
+ )
+
+ # return pred_grasps_cam, pred_scores, pred_points, offset_pred, intermediates
+ return pred
+
+
+ def get_bin_vals(self):
+ """
+ Creates bin values for grasping widths according to bounds defined in config
+
+ Arguments:
+ global_config {dict} -- config
+
+ Returns:
+ torch.tensor -- bin value tensor
+ """
+ bins_bounds = np.array(self.data_config['labels']['offset_bins'])
+ if self.global_config['TEST']['bin_vals'] == 'max':
+ bin_vals = (bins_bounds[1:] + bins_bounds[:-1])/2
+ bin_vals[-1] = bins_bounds[-1]
+ elif self.global_config['TEST']['bin_vals'] == 'mean':
+ bin_vals = bins_bounds[1:]
+ else:
+ raise NotImplementedError
+
+ if not self.global_config['TEST']['allow_zero_margin']:
+ bin_vals = np.minimum(bin_vals, self.global_config['DATA']['gripper_width'] \
+ -self.global_config['TEST']['extra_opening'])
+
+ bin_vals = torch.tensor(bin_vals, dtype=torch.float32).to(self.device)
+ return bin_vals
+
+
+
+ # def build_6d_grasp(self, approach_dirs, base_dirs, contact_pts, thickness, use_tf=False, gripper_depth = 0.1034):
+ def build_6d_grasp(self, approach_dirs, base_dirs, contact_pts, thickness, use_torch=False, gripper_depth = 0.1034):
+ """
+ Build 6-DoF grasps + width from point-wise network predictions
+
+ Arguments:
+ approach_dirs {np.ndarray/tf.tensor} -- Nx3 approach direction vectors
+ base_dirs {np.ndarray/tf.tensor} -- Nx3 base direction vectors
+ contact_pts {np.ndarray/tf.tensor} -- Nx3 contact points
+ thickness {np.ndarray/tf.tensor} -- Nx1 grasp width
+
+ Keyword Arguments:
+ use_tf {bool} -- whether inputs and outputs are tf tensors (default: {False})
+ gripper_depth {float} -- distance from gripper coordinate frame to gripper baseline in m (default: {0.1034})
+
+ Returns:
+ np.ndarray -- Nx4x4 grasp poses in camera coordinates
+ """
+ # We are trying to build a stack of 4x4 homogeneous transform matricies of size B x N x 4 x 4.
+ # To do so, we calculate the rotation and translation portions according to the paper.
+ # This gives us positions as shown:
+ # [ R R R T ]
+ # [ R R R T ]
+ # [ R R R T ]
+ # [ 0 0 0 1 ] Note that the ^ dim is 2 and the --> dim is 3
+ # We need to pad with zeros and ones to get the final shape so we generate
+ # ones and zeros and stack them.
+ if use_torch:
+ grasp_R = torch.stack([base_dirs, torch.cross(approach_dirs,base_dirs),approach_dirs], dim=3) # B x N x 3 x 3
+ grasp_t = contact_pts + (thickness / 2) * base_dirs - gripper_depth * approach_dirs # B x N x 3
+ grasp_t = grasp_t.unsqueeze(3) # B x N x 3 x 1
+ ones = torch.ones((contact_pts.shape[0], contact_pts.shape[1], 1, 1), dtype=torch.float32).to(self.device) # B x N x 1 x 1
+ zeros = torch.zeros((contact_pts.shape[0], contact_pts.shape[1], 1, 3), dtype=torch.float32).to(self.device) # B x N x 1 x 3
+ homog_vec = torch.cat([zeros, ones], dim=3) # B x N x 1 x 4
+ grasps = torch.cat([torch.cat([grasp_R, grasp_t], dim=3), homog_vec], dim=2) # B x N x 4 x 4
+
+ else:
+ grasps = []
+ for i in range(len(contact_pts)):
+ grasp = np.eye(4)
+
+ grasp[:3,0] = base_dirs[i] / np.linalg.norm(base_dirs[i])
+ grasp[:3,2] = approach_dirs[i] / np.linalg.norm(approach_dirs[i])
+ grasp_y = np.cross( grasp[:3,2],grasp[:3,0])
+ grasp[:3,1] = grasp_y / np.linalg.norm(grasp_y)
+ # base_gripper xyz = contact + thickness / 2 * baseline_dir - gripper_d * approach_dir
+ grasp[:3,3] = contact_pts[i] + thickness[i] / 2 * grasp[:3,0] - gripper_depth * grasp[:3,2]
+ # grasp[0,3] = finger_width
+ grasps.append(grasp)
+ grasps = np.array(grasps)
+
+ return grasps
+
+class ContactGraspnetLoss(nn.Module):
+ def __init__(self, global_config, device):
+ super(ContactGraspnetLoss, self).__init__()
+ self.global_config = global_config
+
+ # -- Process config -- #
+ config_losses = [
+ 'pred_contact_base',
+ 'pred_contact_success', # True
+ 'pred_contact_offset', # True
+ 'pred_contact_approach',
+ 'pred_grasps_adds', # True
+ 'pred_grasps_adds_gt2pred'
+ ]
+
+ config_weights = [
+ 'dir_cosine_loss_weight',
+ 'score_ce_loss_weight', # True
+ 'offset_loss_weight', # True
+ 'approach_cosine_loss_weight',
+ 'adds_loss_weight', # True
+ 'adds_gt2pred_loss_weight'
+ ]
+
+ self.device = device
+
+ bin_weights = global_config['DATA']['labels']['bin_weights']
+ self.bin_weights = torch.tensor(bin_weights).to(self.device)
+ self.bin_vals = self._get_bin_vals().to(self.device)
+
+ for config_loss, config_weight in zip(config_losses, config_weights):
+ if global_config['MODEL'][config_loss]:
+ setattr(self, config_weight, global_config['OPTIMIZER'][config_weight])
+ else:
+ setattr(self, config_weight, 0.0)
+
+ self.gripper = mesh_utils.create_gripper('panda')
+ # gripper_control_points = self.gripper.get_control_point_tensor(self.global_config['OPTIMIZER']['batch_size']) # b x 5 x 3
+ # sym_gripper_control_points = self.gripper.get_control_point_tensor(self.global_config['OPTIMIZER']['batch_size'], symmetric=True)
+
+ # self.gripper_control_points_homog = torch.cat([gripper_control_points,
+ # torch.ones((self.global_config['OPTIMIZER']['batch_size'], gripper_control_points.shape[1], 1))], dim=2) # b x 5 x 4
+ # self.sym_gripper_control_points_homog = torch.cat([sym_gripper_control_points,
+ # torch.ones((self.global_config['OPTIMIZER']['batch_size'], gripper_control_points.shape[1], 1))], dim=2) # b x 5 x 4
+
+ n_copies = 1 # We will repeat this according to the batch size
+ gripper_control_points = self.gripper.get_control_point_tensor(n_copies) # b x 5 x 3
+ sym_gripper_control_points = self.gripper.get_control_point_tensor(n_copies, symmetric=True)
+
+ self.gripper_control_points_homog = torch.cat([gripper_control_points,
+ torch.ones((n_copies, gripper_control_points.shape[1], 1))], dim=2) # b x 5 x 4
+ self.sym_gripper_control_points_homog = torch.cat([sym_gripper_control_points,
+ torch.ones((n_copies, gripper_control_points.shape[1], 1))], dim=2) # b x 5 x 4
+
+ self.gripper_control_points_homog = self.gripper_control_points_homog.to(self.device)
+ self.sym_gripper_control_points_homog = self.sym_gripper_control_points_homog.to(self.device)
+
+
+ def forward(self, pred, target):
+ """
+ Computes loss terms from pointclouds, network predictions and labels
+
+ Arguments:
+ pointclouds_pl {tf.placeholder} -- bxNx3 input point clouds
+ end_points {dict[str:tf.variable]} -- endpoints of the network containing predictions
+ dir_labels_pc_cam {tf.variable} -- base direction labels in camera coordinates (bxNx3)
+ offset_labels_pc {tf.variable} -- grasp width labels (bxNx1)
+ grasp_success_labels_pc {tf.variable} -- contact success labels (bxNx1)
+ approach_labels_pc_cam {tf.variable} -- approach direction labels in camera coordinates (bxNx3)
+ global_config {dict} -- config dict
+
+ Returns:
+ [dir_cosine_loss, bin_ce_loss, offset_loss, approach_cosine_loss, adds_loss,
+ adds_loss_gt2pred, gt_control_points, pred_control_points, pos_grasps_in_view] -- All losses (not all are used for training)
+ """
+ pred_grasps_cam = pred['pred_grasps_cam'] # B x N x 4 x 4
+ pred_scores = pred['pred_scores'] # B x N x 1
+ pred_points = pred['pred_points'] # B x N x 3
+ # offset_pred = pred['offset_pred'] # B x N # We use the grasp_offset_head instead of this
+ grasp_offset_head = pred['grasp_offset_head'].permute(0, 2, 1) # B x N x 10
+
+
+ # # Generated in acronym_dataloader.py
+ # grasp_success_labels_pc = target['grasp_success_label'] # B x N
+ # grasp_offset_labels_pc = target['grasp_diff_label'] # B x N x 3
+
+ # approach_labels_pc_cam = target['grasp_approach_label'] # B x N x 3
+ # dir_labels_pc_cam = target['grasp_dir_label'] # B x N x 3
+ # pointclouds_pl = target['pc_cam'] # B x N x 3
+
+ # -- Interpolate Labels -- #
+ pos_contact_points = target['pos_contact_points'] # B x M x 3
+ pos_contact_dirs = target['pos_contact_dirs'] # B x M x 3
+ pos_finger_diffs = target['pos_finger_diffs'] # B x M
+ pos_approach_dirs = target['pos_approach_dirs'] # B x M x 3
+ camera_pose = target['camera_pose'] # B x 4 x 4
+
+ dir_labels_pc_cam, \
+ grasp_offset_labels_pc, \
+ grasp_success_labels_pc, \
+ approach_labels_pc_cam, \
+ debug = self._compute_labels(pred_points,
+ camera_pose,
+ pos_contact_points,
+ pos_contact_dirs,
+ pos_finger_diffs,
+ pos_approach_dirs)
+
+ # I think this is the number of positive grasps that are in view
+ min_geom_loss_divisor = float(self.global_config['LOSS']['min_geom_loss_divisor']) # This is 1.0
+ pos_grasps_in_view = torch.clamp(grasp_success_labels_pc.sum(dim=1), min=min_geom_loss_divisor) # B
+ # pos_grasps_in_view = torch.maximum(grasp_success_labels_pc.sum(dim=1), min_geom_loss_divisor) # B
+
+ total_loss = 0.0
+
+ if self.dir_cosine_loss_weight > 0:
+ raise NotImplementedError
+
+ # -- Grasp Confidence Loss -- #
+ if self.score_ce_loss_weight > 0: # TODO (bin_ce_loss)
+ bin_ce_loss = F.binary_cross_entropy(pred_scores, grasp_success_labels_pc, reduction='none') # B x N x 1
+ if 'topk_confidence' in self.global_config['LOSS'] \
+ and self.global_config['LOSS']['topk_confidence']:
+ bin_ce_loss, _ = torch.topk(bin_ce_loss.squeeze(), k=self.global_config['LOSS']['topk_confidence'])
+ bin_ce_loss = torch.mean(bin_ce_loss)
+
+ total_loss += self.score_ce_loss_weight * bin_ce_loss
+
+ # -- Grasp Offset / Thickness Loss -- #
+ if self.offset_loss_weight > 0: # TODO (offset_loss)
+ if self.global_config['MODEL']['bin_offsets']:
+ # Convert labels to multihot
+ bin_vals = self.global_config['DATA']['labels']['offset_bins']
+ grasp_offset_labels_multihot = self._bin_label_to_multihot(grasp_offset_labels_pc,
+ bin_vals)
+
+ if self.global_config['LOSS']['offset_loss_type'] == 'softmax_cross_entropy':
+ raise NotImplementedError
+
+ else:
+ offset_loss = F.binary_cross_entropy_with_logits(grasp_offset_head,
+ grasp_offset_labels_multihot, reduction='none') # B x N x 1
+ if 'too_small_offset_pred_bin_factor' in self.global_config['LOSS'] \
+ and self.global_config['LOSS']['too_small_offset_pred_bin_factor']:
+ raise NotImplementedError
+
+ # Weight loss for each bin
+ shaped_bin_weights = self.bin_weights[None, None, :]
+ offset_loss = (shaped_bin_weights * offset_loss).mean(axis=2)
+ else:
+ raise NotImplementedError
+ masked_offset_loss = offset_loss * grasp_success_labels_pc.squeeze()
+ # Divide each batch by the number of successful grasps in the batch
+ offset_loss = torch.mean(torch.sum(masked_offset_loss, axis=1, keepdim=True) / pos_grasps_in_view)
+
+ total_loss += self.offset_loss_weight * offset_loss
+
+ if self.approach_cosine_loss_weight > 0:
+ raise NotImplementedError
+
+ # -- 6 Dof Pose Loss -- #
+ if self.adds_loss_weight > 0: # TODO (adds_loss)
+ # Build groudn truth grasps and compare distances to predicted grasps
+
+ ### ADS Gripper PC Loss
+ # Get 6 DoF pose of predicted grasp
+ if self.global_config['MODEL']['bin_offsets']:
+ thickness_gt = self.bin_vals[torch.argmax(grasp_offset_labels_pc, dim=2)]
+ else:
+ thickness_gt = grasp_offset_labels_pc[:, :, 0]
+
+ # TODO: Move this to dataloader?
+ pred_grasps = pred_grasps_cam # B x N x 4 x 4
+ gt_grasps_proj = utils.build_6d_grasp(approach_labels_pc_cam,
+ dir_labels_pc_cam,
+ pred_points,
+ thickness_gt,
+ use_torch=True,
+ device=self.device) # b x N x 4 x 4
+ # Select positive grasps I think?
+ success_mask = grasp_success_labels_pc.bool()[:, :, :, None] # B x N x 1 x 1
+ success_mask = torch.broadcast_to(success_mask, gt_grasps_proj.shape) # B x N x 4 x 4
+ pos_gt_grasps_proj = torch.where(success_mask, gt_grasps_proj, torch.ones_like(gt_grasps_proj) * 100000) # B x N x 4 x 4
+
+ # Expand gripper control points to match number of points
+ # only use per point pred grasps but not per point gt grasps
+ control_points = self.gripper_control_points_homog.unsqueeze(1) # 1 x 1 x 5 x 4
+ control_points = control_points.repeat(pred_points.shape[0], pred_points.shape[1], 1, 1) # b x N x 5 x 4
+
+ sym_control_points = self.sym_gripper_control_points_homog.unsqueeze(1) # 1 x 1 x 5 x 4
+ sym_control_points = sym_control_points.repeat(pred_points.shape[0], pred_points.shape[1], 1, 1) # b x N x 5 x 4
+
+ pred_control_points = torch.matmul(control_points, pred_grasps.permute(0, 1, 3, 2))[:, :, :, :3] # b x N x 5 x 3
+
+ # Transform control points to ground truth locations
+ gt_control_points = torch.matmul(control_points, pos_gt_grasps_proj.permute(0, 1, 3, 2))[:, :, :, :3] # b x N x 5 x 3
+ sym_gt_control_points = torch.matmul(sym_control_points, pos_gt_grasps_proj.permute(0, 1, 3, 2))[:, :, :, :3] # b x N x 5 x 3
+
+ # Compute distances between predicted and ground truth control points
+ expanded_pred_control_points = pred_control_points.unsqueeze(2) # B x N x 1 x 5 x 3
+ expanded_gt_control_points = gt_control_points.unsqueeze(1) # B x 1 x N' x 5 x 3 I think N' == N
+ expanded_sym_gt_control_points = sym_gt_control_points.unsqueeze(1) # B x 1 x N' x 5 x 3 I think N' == N
+
+ # Sum of squared distances between all points
+ squared_add = torch.sum((expanded_pred_control_points - expanded_gt_control_points)**2, dim=(3, 4)) # B x N x N'
+ sym_squared_add = torch.sum((expanded_pred_control_points - expanded_sym_gt_control_points)**2, dim=(3, 4)) # B x N x N'
+
+ # Combine distances between gt and symmetric gt grasps
+ squared_adds = torch.concat([squared_add, sym_squared_add], dim=2) # B x N x 2N'
+
+ # Take min distance to gt grasp for each predicted grasp
+ squared_adds_k = torch.topk(squared_adds, k=1, dim=2, largest=False)[0] # B x N
+
+ # Mask negative grasps
+ # TODO: If there are bugs, its prob here. The original code sums on axis=1
+ # Which just determines if there is a successful grasp in the batch.
+ # I think we just want to select the positive grasps so the sum is redundant.
+ sum_grasp_success_labels = torch.sum(grasp_success_labels_pc, dim=2, keepdim=True)
+ binary_grasp_success_labels = torch.clamp(sum_grasp_success_labels, 0, 1)
+ min_adds = binary_grasp_success_labels * torch.sqrt(squared_adds_k) # B x N x 1
+ adds_loss = torch.sum(pred_scores * min_adds, dim=(1), keepdim=True) # B x 1
+ adds_loss = adds_loss.squeeze() / pos_grasps_in_view.squeeze() # B x 1
+ adds_loss = torch.mean(adds_loss)
+ total_loss += self.adds_loss_weight * adds_loss
+
+ if self.adds_gt2pred_loss_weight > 0:
+ raise NotImplementedError
+
+ loss_info = {
+ 'bin_ce_loss': bin_ce_loss, # Grasp success loss
+ 'offset_loss': offset_loss, # Grasp width loss
+ 'adds_loss': adds_loss, # Pose loss
+ }
+
+ return total_loss, loss_info
+
+ def _get_bin_vals(self):
+ """
+ Creates bin values for grasping widths according to bounds defined in config
+
+ Arguments:
+ global_config {dict} -- config
+
+ Returns:
+ tf.constant -- bin value tensor
+ """
+ bins_bounds = np.array(self.global_config['DATA']['labels']['offset_bins'])
+ if self.global_config['TEST']['bin_vals'] == 'max':
+ bin_vals = (bins_bounds[1:] + bins_bounds[:-1])/2
+ bin_vals[-1] = bins_bounds[-1]
+ elif self.global_config['TEST']['bin_vals'] == 'mean':
+ bin_vals = bins_bounds[1:]
+ else:
+ raise NotImplementedError
+
+ if not self.global_config['TEST']['allow_zero_margin']:
+ bin_vals = np.minimum(bin_vals, self.global_config['DATA']['gripper_width']-self.global_config['TEST']['extra_opening'])
+
+ bin_vals = torch.tensor(bin_vals, dtype=torch.float32)
+ return bin_vals
+
+ def _bin_label_to_multihot(self, cont_labels, bin_boundaries):
+ """
+ Computes binned grasp width labels from continuous labels and bin boundaries
+
+ Arguments:
+ cont_labels {torch.Tensor} -- continuous labels
+ bin_boundaries {list} -- bin boundary values
+
+ Returns:
+ torch.Tensor -- one/multi hot bin labels
+ """
+ bins = []
+ for b in range(len(bin_boundaries)-1):
+ bins.append(torch.logical_and(torch.greater_equal(cont_labels, bin_boundaries[b]), torch.less(cont_labels, bin_boundaries[b+1])))
+ multi_hot_labels = torch.cat(bins, dim=2)
+ multi_hot_labels = multi_hot_labels.to(torch.float32)
+
+ return multi_hot_labels
+
+
+ def _compute_labels(self,
+ processed_pc_cams: torch.Tensor,
+ camera_poses: torch.Tensor,
+ pos_contact_points: torch.Tensor,
+ pos_contact_dirs: torch.Tensor,
+ pos_finger_diffs: torch.Tensor,
+ pos_approach_dirs: torch.Tensor):
+ """
+ Project grasp labels defined on meshes onto rendered point cloud
+ from a camera pose via nearest neighbor contacts within a maximum radius.
+ All points without nearby successful grasp contacts are considered
+ negative contact points.
+
+ Here N is the number of points returned by the PointNet Encoder (2048) while
+ M is the number of points in the ground truth data. B is the batch size.
+ We are trying to assign a label to each of the PointNet points by
+ sampling the nearest ground truth points.
+
+ Arguments:
+ pc_cam_pl (torch.Tensor): (B, N, 3) point cloud in camera frame
+ camera_pose_pl (torch.Tensor): (B, 4, 4) homogenous camera pose
+ pos_contact_points (torch.Tensor): (B, M, 3) contact points in world frame (3 DoF points)
+ pos_contact_dirs (torch.Tensor): (B, M, 3) contact directions (origin centered vectors?)
+ pos_finger_diffs (torch.Tensor): (B, M, ) finger diffs in world frame (scalar distances)
+ pos_approach_dirs (torch.Tensor): (B, M, 3) approach directions in world frame (origin centered vectors?)
+ """
+ label_config = self.global_config['DATA']['labels']
+
+ nsample = label_config['k'] # Currently set to 1
+ radius = label_config['max_radius']
+ filter_z = label_config['filter_z']
+ z_val = label_config['z_val']
+
+ _, N, _ = processed_pc_cams.shape
+ B, M, _ = pos_contact_points.shape
+
+ # -- Make sure pcd is B x N x 3 -- #
+ if processed_pc_cams.shape[2] != 3:
+ xyz_cam = processed_pc_cams[:,:,:3] # N x 3
+ else:
+ xyz_cam = processed_pc_cams
+
+ # -- Transform Ground Truth to Camera Frame -- #
+ # Transform contact points to camera frame (This is a homogenous transform)
+ # We use matmul to accommodate batch
+ # pos_contact_points_cam = pos_contact_points @ (camera_poses[:3,:3].T) + camera_poses[:3,3][None,:]
+ pos_contact_points_cam = torch.matmul(pos_contact_points, camera_poses[:, :3, :3].transpose(1, 2)) \
+ + camera_poses[:,:3,3][:, None,:]
+
+ # Transform contact directions to camera frame (Don't translate because its a direction vector)
+ # pos_contact_dirs_cam = pos_contact_dirs @ camera_poses[:3,:3].T
+ pos_contact_dirs_cam = torch.matmul(pos_contact_dirs, camera_poses[:, :3,:3].transpose(1, 2))
+
+ # Make finger diffs B x M x 1
+ pos_finger_diffs = pos_finger_diffs[:, :, None]
+
+ # Transform approach directions to camera frame (Don't translate because its a direction vector)
+ # pos_approach_dirs_cam = pos_approach_dirs @ camera_poses[:3,:3].T
+ pos_approach_dirs_cam = torch.matmul(pos_approach_dirs, camera_poses[:, :3,:3].transpose(1, 2))
+
+ # -- Filter Direction -- #
+ # TODO: Figure out what is going on here
+ if filter_z:
+ # Filter out directions that are too far
+ dir_filter_passed = (pos_contact_dirs_cam[:, :, 2:3] > z_val).repeat(1, 1, 3)
+ pos_contact_points_cam = torch.where(dir_filter_passed,
+ pos_contact_points_cam,
+ torch.ones_like(pos_contact_points_cam) * 10000)
+
+ # -- Compute Distances -- #
+ # We want to compute the distance between each point in the point cloud and each contact point
+ # We can do this by expanding the dimensions of the tensors and then summing the squared differences
+ xyz_cam_expanded = torch.unsqueeze(xyz_cam, 2) # B x N x 1 x 3
+ pos_contact_points_cam_expanded = torch.unsqueeze(pos_contact_points_cam, 1) # B x 1 x M x 3
+ squared_dists_all = torch.sum((xyz_cam_expanded - pos_contact_points_cam_expanded)**2, dim=3) # B x N x M
+
+ # B x N x k, B x N x k
+ squared_dists_k, close_contact_pt_idcs = torch.topk(squared_dists_all,
+ k=nsample, dim=2, largest=False, sorted=False)
+
+ # -- Group labels -- #
+ grouped_contact_dirs_cam = utils.index_points(pos_contact_dirs_cam, close_contact_pt_idcs) # B x N x k x 3
+ grouped_finger_diffs = utils.index_points(pos_finger_diffs, close_contact_pt_idcs) # B x N x k x 1
+ grouped_approach_dirs_cam = utils.index_points(pos_approach_dirs_cam, close_contact_pt_idcs) # B x N x k x 3
+
+ # grouped_contact_dirs_cam = pos_contact_dirs_cam[close_contact_pt_idcs, :] # B x N x k x 3
+ # grouped_finger_diffs = pos_finger_diffs[close_contact_pt_idcs] # B x N x k x 1
+ # grouped_approach_dirs_cam = pos_approach_dirs_cam[close_contact_pt_idcs, :] # B x N x k x 3
+
+ # -- Compute Labels -- #
+ # Take mean over k nearest neighbors and normalize
+ dir_label = grouped_contact_dirs_cam.mean(dim=2) # B x N x 3
+ dir_label = F.normalize(dir_label, p=2, dim=2) # B x N x 3
+
+ diff_label = grouped_finger_diffs.mean(dim=2)# B x N x 1
+
+ approach_label = grouped_approach_dirs_cam.mean(dim=2) # B x N x 3
+ approach_label = F.normalize(approach_label, p=2, dim=2) # B x N x 3
+
+ grasp_success_label = torch.mean(squared_dists_k, dim=2, keepdim=True) < radius**2 # B x N x 1
+ grasp_success_label = grasp_success_label.type(torch.float32)
+
+ # debug = dict(
+ # xyz_cam = xyz_cam,
+ # pos_contact_points_cam = pos_contact_points_cam,
+ # )
+ debug = {}
+
+
+ return dir_label, diff_label, grasp_success_label, approach_label, debug
+
+
+
+
+
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/checkpoints/contact_graspnet/log_train.txt b/dimos/models/manipulation/contact_graspnet_pytorch/checkpoints/contact_graspnet/log_train.txt
new file mode 100644
index 0000000000..4c6822727e
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/checkpoints/contact_graspnet/log_train.txt
@@ -0,0 +1,4 @@
+Namespace(ckpt_dir='/home/user/Documents/grasp_generation/contact_graspnet_pytorch/checkpoints/contact_graspnet', data_path='/home/user/Documents/grasp_generation/contact_graspnet_pytorch/acronym/', max_epoch=None, batch_size=None, arg_configs=[])
+{'DATA': {'classes': None, 'data_path': '/home/user/Documents/grasp_generation/contact_graspnet_pytorch/acronym/', 'depth_augm': {'clip': 0.005, 'gaussian_kernel': 0, 'sigma': 0.001}, 'gripper_width': 0.08, 'input_normals': False, 'intrinsics': 'realsense', 'labels': {'bin_weights': [0.16652107, 0.21488856, 0.37031708, 0.55618503, 0.75124664, 0.93943357, 1.07824539, 1.19423112, 1.55731375, 3.17161779], 'filter_z': True, 'k': 1, 'max_radius': 0.005, 'num_neg_contacts': 0, 'num_pos_contacts': 8000, 'offset_bins': [0, 0.00794435329, 0.0158887021, 0.0238330509, 0.0317773996, 0.0397217484, 0.0476660972, 0.055610446, 0.0635547948, 0.0714991435, 0.08], 'to_gpu': False, 'z_val': -0.1}, 'ndataset_points': 20000, 'num_point': 2048, 'num_test_scenes': 1000, 'pc_augm': {'clip': 0.005, 'occlusion_dropout_rate': 0.0, 'occlusion_nclusters': 0, 'sigma': 0.0}, 'raw_num_points': 20000, 'scene_contacts_path': 'scene_contacts', 'train_and_test': False, 'train_on_scenes': True, 'use_farthest_point': False, 'use_uniform_quaternions': False, 'view_sphere': {'distance_range': [0.9, 1.3], 'elevation': [30, 150]}}, 'LOSS': {'min_geom_loss_divisor': 1.0, 'offset_loss_type': 'sigmoid_cross_entropy', 'too_small_offset_pred_bin_factor': 0, 'topk_confidence': 512}, 'MODEL': {'asymmetric_model': True, 'bin_offsets': True, 'dir_vec_length_offset': False, 'grasp_conf_head': {'conv1d': 1, 'dropout_keep': 0.5}, 'grasp_dir_head': {'conv1d': 3, 'dropout_keep': 0.7}, 'joint_head': {'conv1d': 4, 'dropout_keep': 0.7}, 'joint_heads': False, 'model': 'contact_graspnet', 'pointnet_fp_modules': [{'mlp': [256, 256]}, {'mlp': [256, 128]}, {'mlp': [128, 128, 128]}], 'pointnet_sa_module': {'group_all': True, 'mlp': [256, 512, 1024]}, 'pointnet_sa_modules_msg': [{'mlp_list': [[32, 32, 64], [64, 64, 128], [64, 96, 128]], 'npoint': 2048, 'nsample_list': [32, 64, 128], 'radius_list': [0.02, 0.04, 0.08]}, {'mlp_list': [[64, 64, 128], [128, 128, 256], [128, 128, 256]], 'npoint': 512, 'nsample_list': [64, 64, 128], 'radius_list': [0.04, 0.08, 0.16]}, {'mlp_list': [[64, 64, 128], [128, 128, 256], [128, 128, 256]], 'npoint': 128, 'nsample_list': [64, 64, 128], 'radius_list': [0.08, 0.16, 0.32]}], 'pred_contact_approach': False, 'pred_contact_base': False, 'pred_contact_offset': True, 'pred_contact_success': True, 'pred_grasps_adds': True, 'pred_grasps_adds_gt2pred': False}, 'OPTIMIZER': {'adds_gt2pred_loss_weight': 1, 'adds_loss_weight': 10, 'approach_cosine_loss_weight': 1, 'backup_every': 200, 'batch_size': 6, 'bn_decay_clip': 0.99, 'bn_decay_decay_rate': 0.5, 'bn_decay_decay_step': 200000, 'bn_init_decay': 0.5, 'checkpoint_every': 100, 'decay_rate': 0.7, 'decay_step': 200000, 'dir_cosine_loss_weight': 1, 'learning_rate': 0.001, 'max_epoch': 200, 'momentum': 0.9, 'offset_loss_weight': 1, 'optimizer': 'adam', 'print_every': 10, 'score_ce_loss_weight': 1, 'val_every': 1}, 'TEST': {'allow_zero_margin': 0, 'bin_vals': 'max', 'center_to_tip': 0.0, 'extra_opening': 0.005, 'filter_thres': 0.0001, 'first_thres': 0.15, 'max_farthest_points': 150, 'num_samples': 200, 'second_thres': 0.15, 'with_replacement': False}}
+pid: 39334
+**** EPOCH 007 ****
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/checkpoints/contact_graspnet/train.py b/dimos/models/manipulation/contact_graspnet_pytorch/checkpoints/contact_graspnet/train.py
new file mode 100644
index 0000000000..0ac758f478
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/checkpoints/contact_graspnet/train.py
@@ -0,0 +1,222 @@
+from genericpath import exists
+import os
+import sys
+import argparse
+from datetime import datetime
+import numpy as np
+import time
+from tqdm import tqdm
+
+CONTACT_DIR = os.path.dirname(os.path.abspath(__file__))
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+ROOT_DIR = os.path.dirname(os.path.dirname(BASE_DIR))
+sys.path.append(os.path.join(BASE_DIR))
+sys.path.append(os.path.join(ROOT_DIR))
+sys.path.append(os.path.join(BASE_DIR, 'pointnet2', 'models'))
+sys.path.append(os.path.join(BASE_DIR, 'pointnet2', 'utils'))
+
+import tensorflow.compat.v1 as tf
+tf.disable_eager_execution()
+TF2 = True
+physical_devices = tf.config.experimental.list_physical_devices('GPU')
+tf.config.experimental.set_memory_growth(physical_devices[0], True)
+
+import config_utils
+from data import PointCloudReader, load_scene_contacts, center_pc_convert_cam
+from summaries import build_summary_ops, build_file_writers
+from tf_train_ops import load_labels_and_losses, build_train_op
+from contact_grasp_estimator import GraspEstimator
+
+def train(global_config, log_dir):
+ """
+ Trains Contact-GraspNet
+
+ Arguments:
+ global_config {dict} -- config dict
+ log_dir {str} -- Checkpoint directory
+ """
+
+ contact_infos = load_scene_contacts(global_config['DATA']['data_path'],
+ scene_contacts_path=global_config['DATA']['scene_contacts_path'])
+
+ num_train_samples = len(contact_infos)-global_config['DATA']['num_test_scenes']
+ num_test_samples = global_config['DATA']['num_test_scenes']
+
+ print('using %s meshes' % (num_train_samples + num_test_samples))
+ if 'train_and_test' in global_config['DATA'] and global_config['DATA']['train_and_test']:
+ num_train_samples = num_train_samples + num_test_samples
+ num_test_samples = 0
+ print('using train and test data')
+
+ pcreader = PointCloudReader(
+ root_folder=global_config['DATA']['data_path'],
+ batch_size=global_config['OPTIMIZER']['batch_size'],
+ estimate_normals=global_config['DATA']['input_normals'],
+ raw_num_points=global_config['DATA']['raw_num_points'],
+ use_uniform_quaternions = global_config['DATA']['use_uniform_quaternions'],
+ scene_obj_scales = [c['obj_scales'] for c in contact_infos],
+ scene_obj_paths = [c['obj_paths'] for c in contact_infos],
+ scene_obj_transforms = [c['obj_transforms'] for c in contact_infos],
+ num_train_samples = num_train_samples,
+ num_test_samples = num_test_samples,
+ use_farthest_point = global_config['DATA']['use_farthest_point'],
+ intrinsics=global_config['DATA']['intrinsics'],
+ elevation=global_config['DATA']['view_sphere']['elevation'],
+ distance_range=global_config['DATA']['view_sphere']['distance_range'],
+ pc_augm_config=global_config['DATA']['pc_augm'],
+ depth_augm_config=global_config['DATA']['depth_augm']
+ )
+
+ with tf.Graph().as_default():
+
+ # Build the model
+ grasp_estimator = GraspEstimator(global_config)
+ ops = grasp_estimator.build_network()
+
+ # contact_tensors = load_contact_grasps(contact_infos, global_config['DATA'])
+
+ loss_ops = load_labels_and_losses(grasp_estimator, contact_infos, global_config)
+
+ ops.update(loss_ops)
+ ops['train_op'] = build_train_op(ops['loss'], ops['step'], global_config)
+
+ # Add ops to save and restore all the variables.
+ saver = tf.train.Saver(save_relative_paths=True, keep_checkpoint_every_n_hours=4)
+
+ # Create a session
+ config = tf.ConfigProto()
+ config.gpu_options.allow_growth = True
+ config.allow_soft_placement = True
+ sess = tf.Session(config=config)
+
+ # Log summaries
+ summary_ops = build_summary_ops(ops, sess, global_config)
+
+ # Init/Load weights
+ grasp_estimator.load_weights(sess, saver, log_dir, mode='train')
+
+ # sess = tf_debug.LocalCLIDebugWrapperSession(sess)
+ # sess.add_tensor_filter("has_inf_or_nan", tf_debug.has_inf_or_nan)
+ file_writers = build_file_writers(sess, log_dir)
+
+ ## define: epoch = arbitrary number of views of every training scene
+ cur_epoch = sess.run(ops['step']) // num_train_samples
+ for epoch in range(cur_epoch, global_config['OPTIMIZER']['max_epoch']):
+ log_string('**** EPOCH %03d ****' % (epoch))
+
+ sess.run(ops['iterator'].initializer)
+ epoch_time = time.time()
+ step = train_one_epoch(sess, ops, summary_ops, file_writers, pcreader)
+ log_string('trained epoch {} in: {}'.format(epoch, time.time()-epoch_time))
+
+ # Save the variables to disk.
+ save_path = saver.save(sess, os.path.join(log_dir, "model.ckpt"), global_step=step, write_meta_graph=False)
+ log_string("Model saved in file: %s" % save_path)
+
+ if num_test_samples > 0:
+ eval_time = time.time()
+ eval_validation_scenes(sess, ops, summary_ops, file_writers, pcreader)
+ log_string('evaluation time: {}'.format(time.time()-eval_time))
+
+def train_one_epoch(sess, ops, summary_ops, file_writers, pcreader):
+ """ ops: dict mapping from string to tf ops """
+
+ log_string(str(datetime.now()))
+ loss_log = np.zeros((10,7))
+ get_time = time.time()
+
+ for batch_idx in range(pcreader._num_train_samples):
+
+ batch_data, cam_poses, scene_idx = pcreader.get_scene_batch(scene_idx=batch_idx)
+
+ # OpenCV OpenGL conversion
+ cam_poses, batch_data = center_pc_convert_cam(cam_poses, batch_data)
+
+ feed_dict = {ops['pointclouds_pl']: batch_data, ops['cam_poses_pl']: cam_poses,
+ ops['scene_idx_pl']: scene_idx, ops['is_training_pl']: True}
+
+ step, summary, _, loss_val, dir_loss, bin_ce_loss, \
+ offset_loss, approach_loss, adds_loss, adds_gt2pred_loss, scene_idx = sess.run([ops['step'], summary_ops['merged'], ops['train_op'], ops['loss'], ops['dir_loss'],
+ ops['bin_ce_loss'], ops['offset_loss'], ops['approach_loss'], ops['adds_loss'],
+ ops['adds_gt2pred_loss'], ops['scene_idx']], feed_dict=feed_dict)
+ assert scene_idx[0] == scene_idx
+
+ loss_log[batch_idx%10,:] = loss_val, dir_loss, bin_ce_loss, offset_loss, approach_loss, adds_loss, adds_gt2pred_loss
+
+ if (batch_idx+1)%10 == 0:
+ file_writers['train_writer'].add_summary(summary, step)
+ f = tuple(np.mean(loss_log, axis=0)) + ((time.time() - get_time) / 10., )
+ log_string('total loss: %f \t dir loss: %f \t ce loss: %f \t off loss: %f \t app loss: %f adds loss: %f \t adds_gt2pred loss: %f \t batch time: %f' % f)
+ get_time = time.time()
+
+ return step
+
+def eval_validation_scenes(sess, ops, summary_ops, file_writers, pcreader, max_eval_objects=500):
+ """ ops: dict mapping from string to tf ops """
+ is_training = False
+ log_string(str(datetime.now()))
+ loss_log = np.zeros((min(pcreader._num_test_samples, max_eval_objects),7))
+
+ # resets accumulation of pr and auc data
+ sess.run(summary_ops['pr_reset_op'])
+
+ for batch_idx in np.arange(min(pcreader._num_test_samples, max_eval_objects)):
+
+ batch_data, cam_poses, scene_idx = pcreader.get_scene_batch(scene_idx=pcreader._num_train_samples + batch_idx)
+
+ # OpenCV OpenGL conversion
+ cam_poses, batch_data = center_pc_convert_cam(cam_poses, batch_data)
+
+ feed_dict = {ops['pointclouds_pl']: batch_data, ops['cam_poses_pl']: cam_poses,
+ ops['scene_idx_pl']: scene_idx, ops['is_training_pl']: False}
+
+ scene_idx, step, loss_val, dir_loss, bin_ce_loss, offset_loss, approach_loss, adds_loss, adds_gt2pred_loss, pr_summary,_,_,_ = sess.run([ops['scene_idx'], ops['step'], ops['loss'], ops['dir_loss'], ops['bin_ce_loss'],
+ ops['offset_loss'], ops['approach_loss'], ops['adds_loss'], ops['adds_gt2pred_loss'],
+ summary_ops['merged_eval'], summary_ops['pr_update_op'],
+ summary_ops['auc_update_op']] + [summary_ops['acc_update_ops']], feed_dict=feed_dict)
+ assert scene_idx[0] == (pcreader._num_train_samples + batch_idx)
+
+ loss_log[batch_idx,:] = loss_val, dir_loss, bin_ce_loss, offset_loss, approach_loss, adds_loss, adds_gt2pred_loss
+
+ file_writers['test_writer'].add_summary(pr_summary, step)
+ f = tuple(np.mean(loss_log, axis=0))
+ log_string('mean val loss: %f \t mean val dir loss: %f \t mean val ce loss: %f \t mean off loss: %f \t mean app loss: %f \t mean adds loss: %f \t mean adds_gt2pred loss: %f' % f)
+
+ return step
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--ckpt_dir', default='checkpoints/contact_graspnet', help='Checkpoint dir')
+ parser.add_argument('--data_path', type=str, default=None, help='Grasp data root dir')
+ parser.add_argument('--max_epoch', type=int, default=None, help='Epochs to run')
+ parser.add_argument('--batch_size', type=int, default=None, help='Batch Size during training')
+ parser.add_argument('--arg_configs', nargs="*", type=str, default=[], help='overwrite config parameters')
+ FLAGS = parser.parse_args()
+
+ ckpt_dir = FLAGS.ckpt_dir
+ if not os.path.exists(ckpt_dir):
+ if not os.path.exists(os.path.dirname(ckpt_dir)):
+ ckpt_dir = os.path.join(BASE_DIR, ckpt_dir)
+ os.makedirs(ckpt_dir, exist_ok=True)
+
+ os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
+ os.system('cp {} {}'.format(os.path.join(CONTACT_DIR, 'contact_graspnet.py'), ckpt_dir)) # bkp of model def
+ os.system('cp {} {}'.format(os.path.join(CONTACT_DIR, 'train.py'), ckpt_dir)) # bkp of train procedure
+
+ LOG_FOUT = open(os.path.join(ckpt_dir, 'log_train.txt'), 'w')
+ LOG_FOUT.write(str(FLAGS)+'\n')
+ def log_string(out_str):
+ LOG_FOUT.write(out_str+'\n')
+ LOG_FOUT.flush()
+ print(out_str)
+
+ global_config = config_utils.load_config(ckpt_dir, batch_size=FLAGS.batch_size, max_epoch=FLAGS.max_epoch,
+ data_path= FLAGS.data_path, arg_configs=FLAGS.arg_configs, save=True)
+
+ log_string(str(global_config))
+ log_string('pid: %s'%(str(os.getpid())))
+
+ train(global_config, ckpt_dir)
+
+ LOG_FOUT.close()
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_env.yml b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_env.yml
new file mode 100644
index 0000000000..e2e6bcd167
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_env.yml
@@ -0,0 +1,273 @@
+name: contact_graspnet
+channels:
+ - pytorch
+ - nvidia
+ - conda-forge
+ - anaconda
+ - defaults
+dependencies:
+ - _libgcc_mutex=0.1=main
+ - _openmp_mutex=5.1=1_gnu
+ - anyio=3.5.0=py39h06a4308_0
+ - appdirs=1.4.4=pyhd3eb1b0_0
+ - argon2-cffi=20.1.0=py39h27cfd23_1
+ - asttokens=2.0.5=pyhd3eb1b0_0
+ - attrs=22.1.0=py39h06a4308_0
+ - backcall=0.2.0=pyhd3eb1b0_0
+ - beautifulsoup4=4.11.1=py39h06a4308_0
+ - binutils_impl_linux-64=2.36.1=h193b22a_2
+ - binutils_linux-64=2.36=hf3e587d_1
+ - blas=1.0=mkl
+ - bleach=4.1.0=pyhd3eb1b0_0
+ - bottleneck=1.3.5=py39h7deecbd_0
+ - brotlipy=0.7.0=py39h27cfd23_1003
+ - bzip2=1.0.8=h7b6447c_0
+ - ca-certificates=2023.01.10=h06a4308_0
+ - certifi=2022.12.7=py39h06a4308_0
+ - cffi=1.15.1=py39h5eee18b_3
+ - charset-normalizer=2.0.4=pyhd3eb1b0_0
+ - colorama=0.4.6=py39h06a4308_0
+ - comm=0.1.2=py39h06a4308_0
+ - cryptography=39.0.1=py39h9ce1e76_2
+ - cuda-cudart=11.7.99=0
+ - cuda-cupti=11.7.101=0
+ - cuda-libraries=11.7.1=0
+ - cuda-nvrtc=11.7.99=0
+ - cuda-nvtx=11.7.91=0
+ - cuda-runtime=11.7.1=0
+ - dataclasses=0.8=pyh6d0b6a4_7
+ - debugpy=1.5.1=py39h295c915_0
+ - decorator=5.1.1=pyhd3eb1b0_0
+ - defusedxml=0.7.1=pyhd3eb1b0_0
+ - entrypoints=0.4=py39h06a4308_0
+ - executing=0.8.3=pyhd3eb1b0_0
+ - expat=2.4.9=h6a678d5_0
+ - ffmpeg=4.2.2=h20bf706_0
+ - filelock=3.9.0=py39h06a4308_0
+ - font-ttf-dejavu-sans-mono=2.37=hd3eb1b0_0
+ - font-ttf-inconsolata=2.001=hcb22688_0
+ - font-ttf-source-code-pro=2.030=hd3eb1b0_0
+ - font-ttf-ubuntu=0.83=h8b1ccd4_0
+ - fontconfig=2.14.1=h4c34cd2_2
+ - fonts-anaconda=1=h8fa9717_0
+ - freetype=2.12.1=h4a9f257_0
+ - freetype-py=2.2.0=pyhd3eb1b0_0
+ - future=0.18.3=py39h06a4308_0
+ - gcc_impl_linux-64=11.1.0=h6b5115b_8
+ - gcc_linux-64=11.1.0=h97fdae6_1
+ - giflib=5.2.1=h5eee18b_3
+ - gmp=6.2.1=h295c915_3
+ - gmpy2=2.1.2=py39heeb90bb_0
+ - gnutls=3.6.15=he1e5248_0
+ - gxx_impl_linux-64=11.1.0=h6b5115b_8
+ - gxx_linux-64=11.1.0=h33c4e4b_1
+ - h5py=3.7.0=py39h737f45e_0
+ - hdf5=1.10.6=h3ffc7dd_1
+ - icu=58.2=he6710b0_3
+ - idna=3.4=py39h06a4308_0
+ - imageio=2.26.0=py39h06a4308_0
+ - intel-openmp=2023.1.0=hdb19cb5_46305
+ - ipykernel=6.19.2=py39hb070fc8_0
+ - ipython=8.8.0=py39h06a4308_0
+ - ipython_genutils=0.2.0=pyhd3eb1b0_1
+ - ipywidgets=7.6.5=pyhd3eb1b0_1
+ - jedi=0.18.1=py39h06a4308_1
+ - jinja2=3.1.2=py39h06a4308_0
+ - jpeg=9e=h5eee18b_1
+ - jsonschema=4.16.0=py39h06a4308_0
+ - jupyter_client=7.4.8=py39h06a4308_0
+ - jupyter_core=5.1.1=py39h06a4308_0
+ - jupyter_server=1.23.4=py39h06a4308_0
+ - jupyterlab_pygments=0.1.2=py_0
+ - jupyterlab_widgets=1.0.0=pyhd3eb1b0_1
+ - kaleido-core=0.2.1=h7c8854e_0
+ - kernel-headers_linux-64=2.6.32=he073ed8_15
+ - lame=3.100=h7b6447c_0
+ - lcms2=2.12=h3be6417_0
+ - ld_impl_linux-64=2.36.1=hea4e1c9_2
+ - lerc=3.0=h295c915_0
+ - libcublas=11.10.3.66=0
+ - libcufft=10.7.2.124=h4fbf590_0
+ - libcufile=1.6.1.9=0
+ - libcurand=10.3.2.106=0
+ - libcusolver=11.4.0.1=0
+ - libcusparse=11.7.4.91=0
+ - libdeflate=1.17=h5eee18b_0
+ - libffi=3.4.4=h6a678d5_0
+ - libgcc-devel_linux-64=11.1.0=h80e7780_8
+ - libgcc-ng=11.2.0=h1234567_1
+ - libgfortran-ng=11.2.0=h00389a5_1
+ - libgfortran5=11.2.0=h1234567_1
+ - libgomp=11.2.0=h1234567_1
+ - libidn2=2.3.4=h5eee18b_0
+ - libnpp=11.7.4.75=0
+ - libnvjpeg=11.8.0.2=0
+ - libopus=1.3.1=h7b6447c_0
+ - libpng=1.6.39=h5eee18b_0
+ - libsanitizer=11.1.0=h56837e0_8
+ - libsodium=1.0.18=h7b6447c_0
+ - libstdcxx-devel_linux-64=11.1.0=h80e7780_8
+ - libstdcxx-ng=11.2.0=h1234567_1
+ - libtasn1=4.19.0=h5eee18b_0
+ - libtiff=4.5.0=h6a678d5_2
+ - libunistring=0.9.10=h27cfd23_0
+ - libuuid=1.41.5=h5eee18b_0
+ - libvpx=1.7.0=h439df22_0
+ - libwebp=1.2.4=h11a3e52_1
+ - libwebp-base=1.2.4=h5eee18b_1
+ - libxml2=2.10.3=hcbfbd50_0
+ - lz4-c=1.9.4=h6a678d5_0
+ - markupsafe=2.1.1=py39h7f8727e_0
+ - mathjax=2.7.5=h06a4308_0
+ - matplotlib-inline=0.1.6=py39h06a4308_0
+ - mistune=0.8.4=py39h27cfd23_1000
+ - mkl=2023.1.0=h6d00ec8_46342
+ - mkl-service=2.4.0=py39h5eee18b_1
+ - mkl_fft=1.3.6=py39h417a72b_1
+ - mkl_random=1.2.2=py39h417a72b_1
+ - mpc=1.1.0=h10f8cd9_1
+ - mpfr=4.0.2=hb69a4c5_1
+ - mpmath=1.2.1=py39h06a4308_0
+ - nbclassic=0.4.8=py39h06a4308_0
+ - nbclient=0.5.13=py39h06a4308_0
+ - nbconvert=6.4.4=py39h06a4308_0
+ - nbformat=5.7.0=py39h06a4308_0
+ - ncurses=6.4=h6a678d5_0
+ - nest-asyncio=1.5.6=py39h06a4308_0
+ - nettle=3.7.3=hbbd107a_1
+ - networkx=2.8.4=py39h06a4308_1
+ - notebook=6.5.2=py39h06a4308_0
+ - notebook-shim=0.2.2=py39h06a4308_0
+ - nspr=4.35=h6a678d5_0
+ - nss=3.89.1=h6a678d5_0
+ - numexpr=2.8.4=py39hc78ab66_1
+ - numpy=1.24.3=py39hf6e8229_1
+ - numpy-base=1.24.3=py39h060ed82_1
+ - openh264=2.1.1=h4ff587b_0
+ - openssl=3.0.8=h7f8727e_0
+ - packaging=23.0=py39h06a4308_0
+ - pandas=1.5.2=py39h417a72b_0
+ - pandocfilters=1.5.0=pyhd3eb1b0_0
+ - parso=0.8.3=pyhd3eb1b0_0
+ - pexpect=4.8.0=pyhd3eb1b0_3
+ - pickleshare=0.7.5=pyhd3eb1b0_1003
+ - pillow=9.4.0=py39h6a678d5_0
+ - pip=23.1.2=py39h06a4308_0
+ - platformdirs=2.5.2=py39h06a4308_0
+ - plotly=5.9.0=py39h06a4308_0
+ - pooch=1.4.0=pyhd3eb1b0_0
+ - prometheus_client=0.14.1=py39h06a4308_0
+ - prompt-toolkit=3.0.36=py39h06a4308_0
+ - psutil=5.9.0=py39h5eee18b_0
+ - ptyprocess=0.7.0=pyhd3eb1b0_2
+ - pure_eval=0.2.2=pyhd3eb1b0_0
+ - pycparser=2.21=pyhd3eb1b0_0
+ - pyglet=1.5.27=py39hf3d152e_3
+ - pygments=2.11.2=pyhd3eb1b0_0
+ - pyopengl=3.1.1a1=py39h06a4308_0
+ - pyopenssl=23.0.0=py39h06a4308_0
+ - pyrender=0.1.45=pyh8a188c0_3
+ - pyrsistent=0.18.0=py39heee7806_0
+ - pysocks=1.7.1=py39h06a4308_0
+ - python=3.9.16=h955ad1f_3
+ - python-dateutil=2.8.2=pyhd3eb1b0_0
+ - python-fastjsonschema=2.16.2=py39h06a4308_0
+ - python-kaleido=0.2.1=pyhd8ed1ab_0
+ - python_abi=3.9=2_cp39
+ - pytorch=2.0.1=py3.9_cuda11.7_cudnn8.5.0_0
+ - pytorch-cuda=11.7=h778d358_5
+ - pytorch-mutex=1.0=cuda
+ - pytz=2022.7=py39h06a4308_0
+ - pyyaml=6.0=py39hb9d737c_4
+ - pyzmq=23.2.0=py39h6a678d5_0
+ - readline=8.2=h5eee18b_0
+ - requests=2.29.0=py39h06a4308_0
+ - scipy=1.10.1=py39hf6e8229_1
+ - send2trash=1.8.0=pyhd3eb1b0_1
+ - setuptools=67.8.0=py39h06a4308_0
+ - six=1.16.0=pyhd3eb1b0_1
+ - sniffio=1.2.0=py39h06a4308_1
+ - soupsieve=2.3.2.post1=py39h06a4308_0
+ - sqlite=3.41.2=h5eee18b_0
+ - stack_data=0.2.0=pyhd3eb1b0_0
+ - sympy=1.11.1=py39h06a4308_0
+ - sysroot_linux-64=2.12=he073ed8_15
+ - tbb=2021.8.0=hdb19cb5_0
+ - tenacity=8.2.2=py39h06a4308_0
+ - terminado=0.17.1=py39h06a4308_0
+ - testpath=0.6.0=py39h06a4308_0
+ - tk=8.6.12=h1ccaba5_0
+ - torchaudio=2.0.2=py39_cu117
+ - torchtriton=2.0.0=py39
+ - torchvision=0.15.2=py39_cu117
+ - tornado=6.2=py39h5eee18b_0
+ - tqdm=4.65.0=pyhd8ed1ab_1
+ - traitlets=5.7.1=py39h06a4308_0
+ - trimesh=3.22.0=pyhd8ed1ab_0
+ - typing_extensions=4.6.3=py39h06a4308_0
+ - tzdata=2023c=h04d1e81_0
+ - urllib3=1.26.16=py39h06a4308_0
+ - wcwidth=0.2.5=pyhd3eb1b0_0
+ - webencodings=0.5.1=py39h06a4308_1
+ - websocket-client=0.58.0=py39h06a4308_4
+ - wheel=0.38.4=py39h06a4308_0
+ - widgetsnbextension=3.5.2=py39h06a4308_0
+ - x264=1!157.20191217=h7b6447c_0
+ - xz=5.4.2=h5eee18b_0
+ - yaml=0.2.5=h7b6447c_0
+ - zeromq=4.3.4=h2531618_0
+ - zipp=3.15.0=pyhd8ed1ab_0
+ - zlib=1.2.13=h5eee18b_0
+ - zstd=1.5.5=hc292b87_0
+ - pip:
+ - absl-py==1.4.0
+ - addict==2.4.0
+ - apptools==5.2.0
+ - cachetools==5.3.1
+ - click==8.1.3
+ - configargparse==1.5.3
+ - configobj==5.0.8
+ - contourpy==1.1.0
+ - cycler==0.11.0
+ - cython==0.29.35
+ - dash==2.10.2
+ - dash-core-components==2.0.0
+ - dash-html-components==2.0.0
+ - dash-table==5.0.0
+ - envisage==7.0.3
+ - flask==2.2.5
+ - fonttools==4.40.0
+ - google-auth==2.20.0
+ - google-auth-oauthlib==1.0.0
+ - grpcio==1.54.2
+ - importlib-metadata==6.7.0
+ - importlib-resources==5.12.0
+ - itsdangerous==2.1.2
+ - joblib==1.2.0
+ - kiwisolver==1.4.4
+ - markdown==3.4.3
+ - matplotlib==3.7.1
+ - mayavi==4.8.1
+ - oauthlib==3.2.2
+ - open3d==0.17.0
+ - opencv-python==4.7.0.72
+ - protobuf==4.23.3
+ - pyasn1==0.5.0
+ - pyasn1-modules==0.3.0
+ - pyface==8.0.0
+ - pyparsing==3.0.9
+ - pyquaternion==0.9.9
+ - python-fcl==0.7.0.4
+ - requests-oauthlib==1.3.1
+ - rsa==4.9
+ - scikit-learn==1.2.2
+ - tensorboard==2.13.0
+ - tensorboard-data-server==0.7.1
+ - tensorboardx==2.6.1
+ - threadpoolctl==3.1.0
+ - traits==6.4.1
+ - traitsui==8.0.0
+ - vtk==9.2.6
+ - werkzeug==2.2.3
+ - rtree
+prefix: /home/user/miniconda3/envs/c_graspnet_torch
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/__init__.py b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/__init__.py
new file mode 100644
index 0000000000..38f345375f
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/__init__.py
@@ -0,0 +1,9 @@
+"""ContactGraspNet PyTorch implementation - core package."""
+
+# Import key modules for easier access
+from contact_graspnet_pytorch.contact_graspnet import ContactGraspnet
+from contact_graspnet_pytorch.contact_grasp_estimator import GraspEstimator
+from contact_graspnet_pytorch.inference import inference
+
+# This will help identify the module when imported
+__package_name__ = 'contact_graspnet_pytorch'
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/acronym_dataloader.py b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/acronym_dataloader.py
new file mode 100644
index 0000000000..439c2b975e
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/acronym_dataloader.py
@@ -0,0 +1,623 @@
+import os
+import sys
+import random
+
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+sys.path.append(os.path.join(BASE_DIR))
+sys.path.append(os.path.join(BASE_DIR, 'Pointnet_Pointnet2_pytorch'))
+
+from PIL import Image
+import argparse
+import numpy as np
+import copy
+import cv2
+import glob
+import trimesh.transformations as tra
+from scipy.spatial import cKDTree
+
+import provider
+from contact_graspnet_pytorch.scene_renderer import SceneRenderer
+
+import torch
+import torch.nn.functional as F
+from torch.utils.data import Dataset
+
+from contact_graspnet_pytorch import utils
+"""
+Missing objects:
+
+/home/user/Documents/grasp_generation/contact_graspnet_pytorch/acronym/meshes/47fc4a2417ca259f894dbff6c6a6be89.obj
+
+/home/user/Documents/grasp_generation/contact_graspnet_pytorch/acronym/meshes/49a0a84ee5a91344b11c5cce13f76151.obj
+
+/home/user/Documents/grasp_generation/contact_graspnet_pytorch/acronym/meshes/feb146982d0c64dfcbf4f3f04bbad8.obj
+
+/home/user/Documents/grasp_generation/contact_graspnet_pytorch/acronym/meshes/8123f469a08a88e7761dc3477e65a72.obj
+
+
+Skipping object (simplify failed): 330880146fd858e2cb6782d0f0acad78
+Skipping object (simplify failed): a24e0ea758d43657a5e3e028709e0474
+Skipping object (simplify failed): 37d510f7958b941b9163cfcfcc029614
+Skipping object (simplify failed): d25b80f268f5731449c3792a0dc29860
+"""
+
+class AcryonymDataset(Dataset):
+ """
+ Class to load scenes, render point clouds and augment them during training
+
+ Arguments:
+ root_folder {str} -- acronym root folder
+ batch_size {int} -- number of rendered point clouds per-batch
+
+ Keyword Arguments:
+ raw_num_points {int} -- Number of random/farthest point samples per scene (default: {20000})
+ estimate_normals {bool} -- compute normals from rendered point cloud (default: {False})
+ caching {bool} -- cache scenes in memory (default: {True})
+ use_uniform_quaternions {bool} -- use uniform quaternions for camera sampling (default: {False})
+ scene_obj_scales {list} -- object scales in scene (default: {None})
+ scene_obj_paths {list} -- object paths in scene (default: {None})
+ scene_obj_transforms {np.ndarray} -- object transforms in scene (default: {None})
+ num_train_samples {int} -- training scenes (default: {None})
+ num_test_samples {int} -- test scenes (default: {None})
+ use_farthest_point {bool} -- use farthest point sampling to reduce point cloud dimension (default: {False})
+ intrinsics {str} -- intrinsics to for rendering depth maps (default: {None})
+ distance_range {tuple} -- distance range from camera to center of table (default: {(0.9,1.3)})
+ elevation {tuple} -- elevation range (90 deg is top-down) (default: {(30,150)})
+ pc_augm_config {dict} -- point cloud augmentation config (default: {None})
+ depth_augm_config {dict} -- depth map augmentation config (default: {None})
+ """
+ def __init__(self, global_config, debug=False, train=True, device=None, use_saved_renders=False):
+ if device is None:
+ self.device = torch.device('cpu')
+ else:
+ self.device = device
+
+ self.train = train
+ self.use_saved_renders = use_saved_renders
+
+ # if use_saved_renders:
+ if not os.path.exists(os.path.join(global_config['DATA']['data_path'], 'renders')):
+ os.mkdir(os.path.join(global_config['DATA']['data_path'], 'renders'))
+
+ # TODO: Add train vs test split
+ # -- Index data -- #
+ self.scene_contacts_dir, self.valid_scene_contacts = \
+ self._check_scene_contacts(global_config['DATA']['data_path'],
+ global_config['DATA']['scene_contacts_path'],
+ debug=debug)
+
+ self._num_test_scenes = global_config['DATA']['num_test_scenes']
+ self._num_train_scenes = len(self.valid_scene_contacts) - self._num_test_scenes
+
+ if 'train_and_test' in global_config['DATA'] and global_config['DATA']['train_and_test']:
+ self._num_train_samples = self._num_train_samples + self._num_test_samples
+ self._num_test_samples = 0
+ print('using train and test data')
+ print('using %s meshes' % (self._num_train_scenes + self._num_test_scenes))
+
+
+ # -- Grab configs -- #
+ self.global_config = global_config
+
+ self._root_folder = global_config['DATA']['data_path']
+
+ self._batch_size = global_config['OPTIMIZER']['batch_size']
+ self._raw_num_points = global_config['DATA']['raw_num_points']
+ self._caching = True
+
+
+
+ self._estimate_normals = global_config['DATA']['input_normals']
+ self._use_farthest_point = global_config['DATA']['use_farthest_point']
+ # self._scene_obj_scales = [c['obj_scales'] for c in contact_infos]
+ # self._scene_obj_paths = [c['obj_paths'] for c in contact_infos]
+ # self._scene_obj_transforms = [c['obj_transforms'] for c in contact_infos]
+ self._distance_range = global_config['DATA']['view_sphere']['distance_range']
+ self._pc_augm_config = global_config['DATA']['pc_augm']
+ self._depth_augm_config = global_config['DATA']['depth_augm']
+ self._return_segmap = False
+
+ use_uniform_quaternions = global_config['DATA']['use_uniform_quaternions']
+ intrinsics=global_config['DATA']['intrinsics']
+ elevation=global_config['DATA']['view_sphere']['elevation']
+
+ self._current_pc = None
+ self._cache = {}
+
+ self._renderer = SceneRenderer(caching=True, intrinsics=intrinsics)
+
+ if use_uniform_quaternions:
+ raise NotImplementedError
+ quat_path = os.path.join(self._root_folder, 'uniform_quaternions/data2_4608.qua')
+ quaternions = [l[:-1].split('\t') for l in open(quat_path, 'r').readlines()]
+
+ quaternions = [[float(t[0]),
+ float(t[1]),
+ float(t[2]),
+ float(t[3])] for t in quaternions]
+ quaternions = np.asarray(quaternions)
+ quaternions = np.roll(quaternions, 1, axis=1)
+ self._all_poses = [tra.quaternion_matrix(q) for q in quaternions]
+ else:
+ self._cam_orientations = []
+ self._elevation = np.array(elevation)/180.
+ for az in np.linspace(0, np.pi * 2, 30):
+ for el in np.linspace(self._elevation[0], self._elevation[1], 30):
+ self._cam_orientations.append(tra.euler_matrix(0, -el, az))
+ self._coordinate_transform = tra.euler_matrix(np.pi/2, 0, 0).dot(tra.euler_matrix(0, np.pi/2, 0))
+
+ self._num_renderable_cam_poses = len(self._cam_orientations)
+ self._num_saved_cam_poses = 200 # TODO: Make this a config
+
+
+ def __getitem__(self, index):
+ """
+ Loads a scene and (potentially) renders a point cloud.
+
+ Data is indexed as follows:
+ For each scene, we have a set of camera poses. The scene index is
+ index // num_cam_poses, the camera pose index is index % num_cam_poses
+
+ Arguments:
+ index {int} -- scene_pose_index.
+
+ Returns:
+ dict -- data dictionary"""
+
+ # Offset index to skip train samples
+ if not self.train:
+ index = index + self._num_train_scenes
+
+ scene_index = index
+ cam_pose_index = random.randint(0, self._num_saved_cam_poses)
+
+ if self._estimate_normals:
+ raise NotImplementedError
+
+ if self._return_segmap:
+ raise NotImplementedError
+
+ estimate_normals = False
+
+ # -- Load Scene Information -- #
+ # scene_index = index // self._num_cam_poses
+ # cam_pose_index = index % self._num_cam_poses
+ scene_id = self.valid_scene_contacts[scene_index]
+
+ scene_info = self._load_contacts(scene_id)
+
+ scene_contact_points = scene_info['scene_contact_points']
+ obj_paths = scene_info['obj_paths']
+ obj_transforms = scene_info['obj_transforms']
+ obj_scales = scene_info['obj_scales']
+ grasp_transforms = scene_info['grasp_transforms']
+
+ # -- Load Scene -- #
+ if self.use_saved_renders:
+ cam_pose_id = f'{cam_pose_index:03}'
+ scene_dir = os.path.join(self._root_folder, 'renders', scene_id)
+ render_path = os.path.join(scene_dir, f'{cam_pose_id}.npz')
+ if os.path.exists(render_path):
+ try:
+ pc_cam, camera_pose = self.load_scene(render_path)
+ pc_normals = np.array([])
+ depth = np.array([])
+ except:
+ print('Loading error, re-rendering')
+ # Remove and re-render if the file is corrupted
+ os.remove(render_path)
+ return self.__getitem__(index)
+ else:
+ if not os.path.exists(scene_dir):
+ os.mkdir(scene_dir)
+ # TODO: Fix this in the paths
+ # for i in range(obj_paths.shape[0]):
+ # parts = obj_paths[i].split('/')
+ # obj_paths[i] = os.path.join(*parts[0:-2], parts[-1])
+
+ # -- Build and Render Scene -- #
+ obj_paths = [os.path.join(self._root_folder, p) for p in obj_paths]
+ try:
+ self.change_scene(obj_paths, obj_scales, obj_transforms, visualize=False)
+ except:
+ print('Error loading scene %s' % scene_id)
+ return self.__getitem__(self._generate_new_idx())
+ pc_cam, pc_normals, camera_pose, depth = self.render_scene(estimate_normals=estimate_normals, augment=False)
+ camera_pose, pc_cam = self._center_pc_convert_cam(camera_pose, pc_cam)
+ np.savez_compressed(render_path, pc_cam=pc_cam, camera_pose=camera_pose)
+ pc_cam = self._augment_pc(pc_cam)
+
+ # -- Or Render Scene without Saving -- #
+ else:
+ # TODO: Fix this in the paths
+ # for i in range(obj_paths.shape[0]):
+ # parts = obj_paths[i].split('/')
+ # obj_paths[i] = os.path.join(*parts[0:-2], parts[-1])
+
+ # -- Build and Render Scene -- #
+ obj_paths = [os.path.join(self._root_folder, p) for p in obj_paths]
+ try:
+ self.change_scene(obj_paths, obj_scales, obj_transforms, visualize=False)
+ except:
+ print('Error loading scene %s' % scene_id)
+ return self.__getitem__(self._generate_new_idx())
+ pc_cam, pc_normals, camera_pose, depth = self.render_scene(estimate_normals=estimate_normals)
+
+ # Convert from OpenGL to OpenCV Coordinates
+ camera_pose, pc_cam = self._center_pc_convert_cam(camera_pose, pc_cam)
+
+ pc_cam = torch.from_numpy(pc_cam).type(torch.float32)
+ pc_normals = torch.tensor(pc_normals).type(torch.float32) # Rn this is []
+ camera_pose = torch.from_numpy(camera_pose).type(torch.float32)
+ depth = torch.from_numpy(depth).type(torch.float32)
+
+ try:
+ pos_contact_points, pos_contact_dirs, pos_finger_diffs, \
+ pos_approach_dirs = self._process_contacts(scene_contact_points, grasp_transforms)
+ except ValueError:
+ print('No positive contacts found')
+ print('Scene id: %s' % scene_id)
+ return self.__getitem__(self._generate_new_idx())
+
+
+ pos_contact_points = pos_contact_points
+ pos_contact_dirs = pos_contact_dirs
+ pos_finger_diffs = pos_finger_diffs
+ pos_approach_dirs = pos_approach_dirs
+
+ data = dict(
+ pc_cam=pc_cam,
+ camera_pose=camera_pose,
+ pos_contact_points=pos_contact_points,
+ pos_contact_dirs=pos_contact_dirs,
+ pos_finger_diffs=pos_finger_diffs,
+ pos_approach_dirs=pos_approach_dirs,
+ )
+
+ return data
+
+ def __len__(self):
+ """
+ Returns the number of rendered scenes in the dataset.
+
+ Returns:
+ int -- self._num_train_scenes * len(self._cam_orientations)
+ """
+ if self.train:
+ return self._num_train_scenes
+ else:
+ return self._num_test_scenes
+
+ def _generate_new_idx(self):
+ """
+ Randomly generates a new index for the dataset.
+
+ Used if the current index is invalid (e.g. no positive contacts or failed to load)
+ """
+ if self.train:
+ return torch.randint(self._num_train_scenes, (1,))[0]
+ else:
+ return torch.randint(self._num_test_scenes, (1,))[0]
+
+ def _center_pc_convert_cam(self, cam_pose, point_clouds):
+ """
+ Converts from OpenGL to OpenCV coordinates, computes inverse of camera pose and centers point cloud
+
+ :param cam_poses: (bx4x4) Camera poses in OpenGL format
+ :param batch_data: (bxNx3) point clouds
+ :returns: (cam_poses, batch_data) converted
+ """
+
+ # OpenCV OpenGL conversion
+ cam_pose[:3, 1] = -cam_pose[:3, 1]
+ cam_pose[:3, 2] = -cam_pose[:3, 2]
+ cam_pose = utils.inverse_transform(cam_pose)
+
+ pc_mean = np.mean(point_clouds, axis=0, keepdims=True)
+ point_clouds[:,:3] -= pc_mean[:,:3]
+ cam_pose[:3,3] -= pc_mean[0,:3]
+
+ return cam_pose, point_clouds
+
+ def _check_scene_contacts(self, dataset_folder, scene_contacts_path='scene_contacts_new', debug=False):
+ """
+ Attempt to load each scene contact in the scene_contacts_path directory.
+ Returns a list of all valid paths
+
+ Arguments:
+ dataset_folder {str} -- path to dataset folder
+ scene_contacts_path {str} -- path to scene contacts
+
+ Returns:
+ list(str) -- list of valid scene contact paths
+ """
+ scene_contacts_dir = os.path.join(dataset_folder, scene_contacts_path)
+ scene_contacts_paths = sorted(glob.glob(os.path.join(scene_contacts_dir, '*')))
+ valid_scene_contacts = []
+
+ # Faster
+ corrupt_list = [
+ # Won't load
+ '001780',
+ '002100',
+ '002486',
+ '004861',
+ '004866',
+ '007176',
+ '008283',
+
+ # Newer won't load
+ '005723'
+ '001993',
+ '001165',
+ '006828',
+ '007238',
+ '008980',
+ '005623',
+ '001993',
+
+ # No contacts
+ '008509',
+ '007996',
+ '009296',
+ ]
+
+ for contact_path in scene_contacts_paths:
+ if contact_path.split('/')[-1].split('.')[0] in corrupt_list:
+ continue
+ contact_id = contact_path.split('/')[-1].split('.')[0]
+ valid_scene_contacts.append(contact_id)
+
+ return scene_contacts_dir, valid_scene_contacts
+
+ def _load_contacts(self, scene_id):
+ """
+ Loads contact information for a given scene
+ """
+ return np.load(os.path.join(self.scene_contacts_dir, scene_id + '.npz'), allow_pickle=True)
+
+ def _process_contacts(self, scene_contact_points, grasp_transforms):
+ """
+ Processes contact information for a given scene
+
+ num_contacts may not be the same for each scene. We resample to make this
+ the same
+
+ Arguments:
+ scene_contact_points {np.ndarray} -- (num_contacts, 2, 3) array of contact points
+ grasp_transforms {np.ndarray} -- (num_contacts, 4, 4) array of grasp transforms
+
+ Returns:
+ Scene data
+ """
+ num_pos_contacts = self.global_config['DATA']['labels']['num_pos_contacts']
+
+ contact_directions_01 = scene_contact_points[:,0,:] - scene_contact_points[:,1,:]
+ all_contact_points = scene_contact_points.reshape(-1, 3)
+ all_finger_diffs = np.maximum(np.linalg.norm(contact_directions_01,axis=1), np.finfo(np.float32).eps)
+ all_contact_directions = np.empty((contact_directions_01.shape[0]*2, contact_directions_01.shape[1],))
+ all_contact_directions[0::2] = -contact_directions_01 / all_finger_diffs[:,np.newaxis]
+ all_contact_directions[1::2] = contact_directions_01 / all_finger_diffs[:,np.newaxis]
+ all_contact_suc = np.ones_like(all_contact_points[:,0])
+ all_grasp_transform = grasp_transforms.reshape(-1,4,4)
+ all_approach_directions = all_grasp_transform[:,:3,2]
+
+ pos_idcs = np.where(all_contact_suc>0)[0]
+ if len(pos_idcs) == 0:
+ raise ValueError('No positive contacts found')
+ # print('total positive contact points ', len(pos_idcs))
+
+ all_pos_contact_points = all_contact_points[pos_idcs]
+ all_pos_finger_diffs = all_finger_diffs[pos_idcs//2]
+ all_pos_contact_dirs = all_contact_directions[pos_idcs]
+ all_pos_approach_dirs = all_approach_directions[pos_idcs//2]
+
+ # -- Sample Positive Contacts -- #
+ # Use all positive contacts then mesh_utils with replacement
+ if num_pos_contacts > len(all_pos_contact_points)/2:
+ pos_sampled_contact_idcs = np.arange(len(all_pos_contact_points))
+ pos_sampled_contact_idcs_replacement = np.random.choice(
+ np.arange(len(all_pos_contact_points)),
+ num_pos_contacts*2 - len(all_pos_contact_points),
+ replace=True)
+ pos_sampled_contact_idcs= np.hstack((pos_sampled_contact_idcs,
+ pos_sampled_contact_idcs_replacement))
+ else:
+ pos_sampled_contact_idcs = np.random.choice(
+ np.arange(len(all_pos_contact_points)),
+ num_pos_contacts*2,
+ replace=False)
+
+ pos_contact_points = torch.from_numpy(all_pos_contact_points[pos_sampled_contact_idcs,:]).type(torch.float32)
+
+ pos_contact_dirs = torch.from_numpy(all_pos_contact_dirs[pos_sampled_contact_idcs,:]).type(torch.float32)
+ pos_contact_dirs = F.normalize(pos_contact_dirs, p=2, dim=1)
+
+ pos_finger_diffs = torch.from_numpy(all_pos_finger_diffs[pos_sampled_contact_idcs]).type(torch.float32)
+
+ pos_approach_dirs = torch.from_numpy(all_pos_approach_dirs[pos_sampled_contact_idcs]).type(torch.float32)
+ pos_approach_dirs = F.normalize(pos_approach_dirs, p=2, dim=1)
+
+ return pos_contact_points, pos_contact_dirs, pos_finger_diffs, pos_approach_dirs
+ # return tf_pos_contact_points, tf_pos_contact_dirs, tf_pos_contact_approaches, tf_pos_finger_diffs, tf_scene_idcs
+
+ def get_cam_pose(self, cam_orientation):
+ """
+ Samples camera pose on shell around table center
+
+ Arguments:
+ cam_orientation {np.ndarray} -- 3x3 camera orientation matrix
+
+ Returns:
+ [np.ndarray] -- 4x4 homogeneous camera pose
+ """
+
+ distance = self._distance_range[0] + np.random.rand()*(self._distance_range[1]-self._distance_range[0])
+
+ extrinsics = np.eye(4)
+ extrinsics[0, 3] += distance
+ extrinsics = cam_orientation.dot(extrinsics)
+
+ cam_pose = extrinsics.dot(self._coordinate_transform)
+ # table height
+ cam_pose[2,3] += self._renderer._table_dims[2]
+ cam_pose[:3,:2]= -cam_pose[:3,:2]
+ return cam_pose
+
+ def _augment_pc(self, pc):
+ """
+ Augments point cloud with jitter and dropout according to config
+
+ Arguments:
+ pc {np.ndarray} -- Nx3 point cloud
+
+ Returns:
+ np.ndarray -- augmented point cloud
+ """
+
+ # not used because no artificial occlusion
+ if 'occlusion_nclusters' in self._pc_augm_config and self._pc_augm_config['occlusion_nclusters'] > 0:
+ pc = self.apply_dropout(pc,
+ self._pc_augm_config['occlusion_nclusters'],
+ self._pc_augm_config['occlusion_dropout_rate'])
+
+ if 'sigma' in self._pc_augm_config and self._pc_augm_config['sigma'] > 0:
+ pc = provider.jitter_point_cloud(pc[np.newaxis, :, :],
+ sigma=self._pc_augm_config['sigma'],
+ clip=self._pc_augm_config['clip'])[0]
+
+
+ return pc[:,:3]
+
+ def _augment_depth(self, depth):
+ """
+ Augments depth map with z-noise and smoothing according to config
+
+ Arguments:
+ depth {np.ndarray} -- depth map
+
+ Returns:
+ np.ndarray -- augmented depth map
+ """
+
+ if 'sigma' in self._depth_augm_config and self._depth_augm_config['sigma'] > 0:
+ clip = self._depth_augm_config['clip']
+ sigma = self._depth_augm_config['sigma']
+ noise = np.clip(sigma*np.random.randn(*depth.shape), -clip, clip)
+ depth += noise
+ if 'gaussian_kernel' in self._depth_augm_config and self._depth_augm_config['gaussian_kernel'] > 0:
+ kernel = self._depth_augm_config['gaussian_kernel']
+ depth_copy = depth.copy()
+ depth = cv2.GaussianBlur(depth,(kernel,kernel),0)
+ depth[depth_copy==0] = depth_copy[depth_copy==0]
+
+ return depth
+
+ def apply_dropout(self, pc, occlusion_nclusters, occlusion_dropout_rate):
+ """
+ Remove occlusion_nclusters farthest points from point cloud with occlusion_dropout_rate probability
+
+ Arguments:
+ pc {np.ndarray} -- Nx3 point cloud
+ occlusion_nclusters {int} -- noof cluster to remove
+ occlusion_dropout_rate {float} -- prob of removal
+
+ Returns:
+ [np.ndarray] -- N > Mx3 point cloud
+ """
+ if occlusion_nclusters == 0 or occlusion_dropout_rate == 0.:
+ return pc
+
+ labels = utils.farthest_points(pc, occlusion_nclusters, utils.distance_by_translation_point)
+
+ removed_labels = np.unique(labels)
+ removed_labels = removed_labels[np.random.rand(removed_labels.shape[0]) < occlusion_dropout_rate]
+ if removed_labels.shape[0] == 0:
+ return pc
+ mask = np.ones(labels.shape, labels.dtype)
+ for l in removed_labels:
+ mask = np.logical_and(mask, labels != l)
+ return pc[mask]
+
+
+ def render_scene(self, estimate_normals=False, camera_pose=None, augment=True):
+ """
+ Renders scene depth map, transforms to regularized pointcloud and applies augmentations
+
+ Keyword Arguments:
+ estimate_normals {bool} -- calculate and return normals (default: {False})
+ camera_pose {[type]} -- camera pose to render the scene from. (default: {None})
+
+ Returns:
+ [pc, pc_normals, camera_pose, depth] -- [point cloud, point cloud normals, camera pose, depth]
+ """
+ if camera_pose is None:
+ viewing_index = np.random.randint(0, high=len(self._cam_orientations))
+ camera_orientation = self._cam_orientations[viewing_index]
+ camera_pose = self.get_cam_pose(camera_orientation)
+
+ in_camera_pose = copy.deepcopy(camera_pose)
+
+ # 0.005 s
+ _, depth, _, camera_pose = self._renderer.render(in_camera_pose, render_pc=False)
+ depth = self._augment_depth(depth)
+
+ pc = self._renderer._to_pointcloud(depth)
+ pc = utils.regularize_pc_point_count(pc, self._raw_num_points, use_farthest_point=self._use_farthest_point)
+ if augment:
+ pc = self._augment_pc(pc)
+
+ pc_normals = utils.estimate_normals_cam_from_pc(pc[:,:3], raw_num_points=self._raw_num_points) if estimate_normals else []
+
+ return pc, pc_normals, camera_pose, depth
+
+ def load_scene(self, render_path):
+ """
+ Return point cloud and camera pose. Used for loading saved renders.
+ Arguments:
+ scene_id {str} -- scene index
+ cam_pose_id {str} -- camera pose index as length 3 string with
+ leading zeros if necessary.
+
+ Returns:
+ [pc, camera_pose] -- [point cloud, camera pose]
+ or returns False if not found
+ """
+ # print('Loading: ', render_path)
+ data = np.load(render_path, allow_pickle=True)
+ pc_cam = data['pc_cam']
+ camera_pose = data['camera_pose']
+ pc_cam_aug = self._augment_pc(pc_cam)
+ return pc_cam_aug, camera_pose
+
+ def change_object(self, cad_path, cad_scale):
+ """
+ Change object in pyrender scene
+
+ Arguments:
+ cad_path {str} -- path to CAD model
+ cad_scale {float} -- scale of CAD model
+ """
+
+ self._renderer.change_object(cad_path, cad_scale)
+
+ def change_scene(self, obj_paths, obj_scales, obj_transforms, visualize=False):
+ """
+ Change pyrender scene
+
+ Arguments:
+ obj_paths {list[str]} -- path to CAD models in scene
+ obj_scales {list[float]} -- scales of CAD models
+ obj_transforms {list[np.ndarray]} -- poses of CAD models
+
+ Keyword Arguments:
+ visualize {bool} -- whether to update the visualizer as well (default: {False})
+ """
+ self._renderer.change_scene(obj_paths, obj_scales, obj_transforms)
+ if visualize:
+ self._visualizer.change_scene(obj_paths, obj_scales, obj_transforms)
+
+ def __del__(self):
+ print('********** terminating renderer **************')
+
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/checkpoints.py b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/checkpoints.py
new file mode 100644
index 0000000000..e502f4dbdc
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/checkpoints.py
@@ -0,0 +1,101 @@
+# From https://github.com/autonomousvision/occupancy_networks/
+import os
+import urllib
+import torch
+from torch.utils import model_zoo
+
+
+class CheckpointIO(object):
+ ''' CheckpointIO class.
+
+ It handles saving and loading checkpoints.
+
+ Args:
+ checkpoint_dir (str): path where checkpoints are saved
+ '''
+ def __init__(self, checkpoint_dir='./chkpts', **kwargs):
+ self.module_dict = kwargs
+ self.checkpoint_dir = checkpoint_dir
+ if not os.path.exists(checkpoint_dir):
+ os.makedirs(checkpoint_dir)
+
+ def register_modules(self, **kwargs):
+ ''' Registers modules in current module dictionary.
+ '''
+ self.module_dict.update(kwargs)
+
+ def save(self, filename, **kwargs):
+ ''' Saves the current module dictionary.
+
+ Args:
+ filename (str): name of output file
+ '''
+ if not os.path.isabs(filename):
+ filename = os.path.join(self.checkpoint_dir, filename)
+
+ outdict = kwargs
+ for k, v in self.module_dict.items():
+ outdict[k] = v.state_dict()
+ torch.save(outdict, filename)
+
+ def load(self, filename):
+ '''Loads a module dictionary from local file or url.
+
+ Args:
+ filename (str): name of saved module dictionary
+ '''
+ if is_url(filename):
+ return self.load_url(filename)
+ else:
+ return self.load_file(filename)
+
+ def load_file(self, filename):
+ '''Loads a module dictionary from file.
+
+ Args:
+ filename (str): name of saved module dictionary
+ '''
+
+ if not os.path.isabs(filename):
+ filename = os.path.join(self.checkpoint_dir, filename)
+
+ if os.path.exists(filename):
+ print(filename)
+ print('=> Loading checkpoint from local file...')
+ state_dict = torch.load(filename)
+ scalars = self.parse_state_dict(state_dict)
+ return scalars
+ else:
+ raise FileExistsError
+
+ def load_url(self, url):
+ '''Load a module dictionary from url.
+
+ Args:
+ url (str): url to saved model
+ '''
+ print(url)
+ print('=> Loading checkpoint from url...')
+ state_dict = model_zoo.load_url(url, progress=True)
+ scalars = self.parse_state_dict(state_dict)
+ return scalars
+
+ def parse_state_dict(self, state_dict):
+ '''Parse state_dict of model and return scalars.
+
+ Args:
+ state_dict (dict): State dict of model
+ '''
+
+ for k, v in self.module_dict.items():
+ if k in state_dict:
+ v.load_state_dict(state_dict[k])
+ else:
+ print('Warning: Could not find %s in checkpoint!' % k)
+ scalars = {k: v for k, v in state_dict.items()
+ if k not in self.module_dict}
+ return scalars
+
+def is_url(url):
+ scheme = urllib.parse.urlparse(url).scheme
+ return scheme in ('http', 'https')
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/config.yaml b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/config.yaml
new file mode 100644
index 0000000000..095078cd3c
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/config.yaml
@@ -0,0 +1,196 @@
+DATA:
+ classes: null
+ data_path: /home/user/Documents/grasp_generation/contact_graspnet_pytorch/acronym/
+ depth_augm:
+ clip: 0.005
+ gaussian_kernel: 0
+ sigma: 0.001
+ gripper_width: 0.08
+ input_normals: false
+ intrinsics: realsense
+ labels:
+ bin_weights:
+ - 0.16652107
+ - 0.21488856
+ - 0.37031708
+ - 0.55618503
+ - 0.75124664
+ - 0.93943357
+ - 1.07824539
+ - 1.19423112
+ - 1.55731375
+ - 3.17161779
+ filter_z: true
+ k: 1
+ max_radius: 0.005
+ num_neg_contacts: 0
+ num_pos_contacts: 8000
+ offset_bins:
+ - 0
+ - 0.00794435329
+ - 0.0158887021
+ - 0.0238330509
+ - 0.0317773996
+ - 0.0397217484
+ - 0.0476660972
+ - 0.055610446
+ - 0.0635547948
+ - 0.0714991435
+ - 0.08
+ to_gpu: false
+ z_val: -0.1
+ ndataset_points: 20000
+ num_point: 2048
+ num_test_scenes: 1000
+ pc_augm:
+ clip: 0.005
+ occlusion_dropout_rate: 0.0
+ occlusion_nclusters: 0
+ sigma: 0.0
+ raw_num_points: 20000
+ scene_contacts_path: scene_contacts
+ train_and_test: false
+ train_on_scenes: true
+ use_farthest_point: false
+ use_uniform_quaternions: false
+ view_sphere:
+ distance_range:
+ - 0.9
+ - 1.3
+ elevation:
+ - 30
+ - 150
+LOSS:
+ min_geom_loss_divisor: 1.0
+ offset_loss_type: sigmoid_cross_entropy
+ too_small_offset_pred_bin_factor: 0
+ topk_confidence: 512
+MODEL:
+ asymmetric_model: true
+ bin_offsets: true
+ dir_vec_length_offset: false
+ grasp_conf_head:
+ conv1d: 1
+ dropout_keep: 0.5
+ grasp_dir_head:
+ conv1d: 3
+ dropout_keep: 0.7
+ joint_head:
+ conv1d: 4
+ dropout_keep: 0.7
+ joint_heads: false
+ model: contact_graspnet
+ pointnet_fp_modules:
+ - mlp:
+ - 256
+ - 256
+ - mlp:
+ - 256
+ - 128
+ - mlp:
+ - 128
+ - 128
+ - 128
+ pointnet_sa_module:
+ group_all: true
+ mlp:
+ - 256
+ - 512
+ - 1024
+ pointnet_sa_modules_msg:
+ - mlp_list:
+ - - 32
+ - 32
+ - 64
+ - - 64
+ - 64
+ - 128
+ - - 64
+ - 96
+ - 128
+ npoint: 2048
+ nsample_list:
+ - 32
+ - 64
+ - 128
+ radius_list:
+ - 0.02
+ - 0.04
+ - 0.08
+ - mlp_list:
+ - - 64
+ - 64
+ - 128
+ - - 128
+ - 128
+ - 256
+ - - 128
+ - 128
+ - 256
+ npoint: 512
+ nsample_list:
+ - 64
+ - 64
+ - 128
+ radius_list:
+ - 0.04
+ - 0.08
+ - 0.16
+ - mlp_list:
+ - - 64
+ - 64
+ - 128
+ - - 128
+ - 128
+ - 256
+ - - 128
+ - 128
+ - 256
+ npoint: 128
+ nsample_list:
+ - 64
+ - 64
+ - 128
+ radius_list:
+ - 0.08
+ - 0.16
+ - 0.32
+ pred_contact_approach: false
+ pred_contact_base: false
+ pred_contact_offset: true
+ pred_contact_success: true
+ pred_grasps_adds: true
+ pred_grasps_adds_gt2pred: false
+OPTIMIZER:
+ adds_gt2pred_loss_weight: 1
+ adds_loss_weight: 10
+ approach_cosine_loss_weight: 1
+ backup_every: 200
+ batch_size: 6
+ bn_decay_clip: 0.99
+ bn_decay_decay_rate: 0.5
+ bn_decay_decay_step: 200000
+ bn_init_decay: 0.5
+ checkpoint_every: 100
+ decay_rate: 0.7
+ decay_step: 200000
+ dir_cosine_loss_weight: 1
+ learning_rate: 0.001
+ max_epoch: 200
+ momentum: 0.9
+ offset_loss_weight: 1
+ optimizer: adam
+ print_every: 10
+ score_ce_loss_weight: 1
+ val_every: 1
+TEST:
+ first_thres: 0.15
+ second_thres: 0.15
+ allow_zero_margin: 0
+ bin_vals: max
+ center_to_tip: 0.0
+ extra_opening: 0.005
+ max_farthest_points: 150
+ num_samples: 200
+ with_replacement: false
+ filter_thres: 0.0001
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/config_utils.py b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/config_utils.py
new file mode 100644
index 0000000000..7ebb5c2e6d
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/config_utils.py
@@ -0,0 +1,66 @@
+import os
+import yaml
+
+def recursive_key_value_assign(d,ks,v):
+ """
+ Recursive value assignment to a nested dict
+
+ Arguments:
+ d {dict} -- dict
+ ks {list} -- list of hierarchical keys
+ v {value} -- value to assign
+ """
+
+ if len(ks) > 1:
+ recursive_key_value_assign(d[ks[0]],ks[1:],v)
+ elif len(ks) == 1:
+ d[ks[0]] = v
+
+def load_config(checkpoint_dir, batch_size=None, max_epoch=None, data_path=None, arg_configs=[], save=False):
+ """
+ Loads yaml config file and overwrites parameters with function arguments and --arg_config parameters
+
+ Arguments:
+ checkpoint_dir {str} -- Checkpoint directory where config file was copied to
+
+ Keyword Arguments:
+ batch_size {int} -- [description] (default: {None})
+ max_epoch {int} -- "epochs" (number of scenes) to train (default: {None})
+ data_path {str} -- path to scenes with contact grasp data (default: {None})
+ arg_configs {list} -- Overwrite config parameters by hierarchical command line arguments (default: {[]})
+ save {bool} -- Save overwritten config file (default: {False})
+
+ Returns:
+ [dict] -- Config
+ """
+
+ config_path = os.path.join(checkpoint_dir, 'config.yaml')
+ config_path = config_path if os.path.exists(config_path) else os.path.join(os.path.dirname(__file__),'config.yaml')
+ with open(config_path,'r') as f:
+ global_config = yaml.safe_load(f)
+
+ for conf in arg_configs:
+ k_str, v = conf.split(':')
+ try:
+ v = eval(v)
+ except:
+ pass
+ ks = [int(k) if k.isdigit() else k for k in k_str.split('.')]
+
+ recursive_key_value_assign(global_config, ks, v)
+
+ if batch_size is not None:
+ global_config['OPTIMIZER']['batch_size'] = int(batch_size)
+ if max_epoch is not None:
+ global_config['OPTIMIZER']['max_epoch'] = int(max_epoch)
+ if data_path is not None:
+ global_config['DATA']['data_path'] = data_path
+
+ global_config['DATA']['classes'] = None
+
+ if save:
+ with open(os.path.join(checkpoint_dir, 'config.yaml'),'w') as f:
+ yaml.dump(global_config, f)
+
+ return global_config
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/contact_grasp_estimator.py b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/contact_grasp_estimator.py
new file mode 100644
index 0000000000..ad622d7c42
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/contact_grasp_estimator.py
@@ -0,0 +1,412 @@
+import importlib
+import numpy as np
+import torch
+import sys
+import os
+import time
+
+from contact_graspnet_pytorch import config_utils
+from contact_graspnet_pytorch.data import farthest_points, \
+ distance_by_translation_point, preprocess_pc_for_inference, \
+ regularize_pc_point_count, depth2pc, reject_median_outliers
+
+class GraspEstimator:
+ """
+ Class for building and inferencing Contact-GraspNet
+
+ :param cfg: config dict
+ """
+ def __init__(self, cfg):
+
+ if 'surface_grasp_logdir_folder' in cfg:
+ # for sim evaluation
+ self._contact_grasp_cfg = config_utils.load_config(cfg['surface_grasp_logdir_folder'], batch_size=1, arg_configs=cfg.arg_configs)
+ self._cfg = cfg
+ self._num_samples = self._cfg.num_samples
+ else:
+ self._contact_grasp_cfg = cfg
+
+ self._model_func = importlib.import_module('contact_graspnet_pytorch.' + self._contact_grasp_cfg['MODEL']['model'])
+ self._num_input_points = self._contact_grasp_cfg['DATA']['raw_num_points'] if 'raw_num_points' in self._contact_grasp_cfg['DATA'] else self._contact_grasp_cfg['DATA']['num_point']
+
+ print('model func: ', self._model_func)
+
+
+ self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
+ self.model = self._model_func.ContactGraspnet(self._contact_grasp_cfg, self.device)
+ self.model.to(self.device)
+ # self.placeholders = self._model_func.placeholder_inputs(self._contact_grasp_cfg['OPTIMIZER']['batch_size'],
+ # self._num_input_points,
+ # self._contact_grasp_cfg['DATA']['input_normals'])
+ # self.model_ops = {}
+
+ def build_network(self):
+ """
+ Build tensorflow graph and grasp representation
+ :returns: tensorflow ops to infer the model predictions
+ """
+ global_config = self._contact_grasp_cfg
+
+ # Note the global_step=step parameter to minimize.
+ # That tells the optimizer to helpfully increment the 'step' parameter for you every time it trains.
+ step = tf.Variable(0)
+ bn_decay = get_bn_decay(step, global_config['OPTIMIZER'])
+ tf.summary.scalar('bn_decay', bn_decay)
+
+ print("--- Get model")
+ # Get model
+ end_points = self._model_func.get_model(self.placeholders['pointclouds_pl'], self.placeholders['is_training_pl'], global_config, bn_decay=bn_decay)
+
+ tf_bin_vals = self._model_func.get_bin_vals(global_config)
+ offset_bin_pred_vals = tf.gather_nd(tf_bin_vals, tf.expand_dims(tf.argmax(end_points['grasp_offset_head'], axis=2), axis=2)) if global_config['MODEL']['bin_offsets'] else end_points['grasp_offset_pred'][:,:,0]
+
+ grasp_preds = self._model_func.build_6d_grasp(end_points['approach_dir_head'], end_points['grasp_dir_head'], end_points['pred_points'], offset_bin_pred_vals, use_tf=True) # b x num_point x 4 x 4
+
+ self.model_ops = {'pointclouds_pl': self.placeholders['pointclouds_pl'],
+ 'cam_poses_pl': self.placeholders['cam_poses_pl'],
+ 'scene_idx_pl': self.placeholders['scene_idx_pl'],
+ 'is_training_pl': self.placeholders['is_training_pl'],
+ 'grasp_dir_pred': end_points['grasp_dir_head'],
+ 'binary_seg_head': end_points['binary_seg_head'],
+ 'binary_seg_pred': end_points['binary_seg_pred'],
+ 'grasp_offset_head': end_points['grasp_offset_head'],
+ 'grasp_offset_pred': end_points['grasp_offset_pred'],
+ 'approach_dir_pred': end_points['approach_dir_head'],
+ 'pred_points': end_points['pred_points'],
+ 'offset_pred_idcs_pc': tf.argmax(end_points['grasp_offset_head'], axis=2) if global_config['MODEL']['bin_offsets'] else None,
+ 'offset_bin_pred_vals': offset_bin_pred_vals,
+ 'grasp_preds': grasp_preds,
+ 'step': step,
+ 'end_points': end_points}
+
+ self.inference_ops = [self.model_ops['grasp_preds'], self.model_ops['binary_seg_pred'], self.model_ops['pred_points']]
+ if self.model_ops['offset_bin_pred_vals'] is None:
+ self.inference_ops.append(self.model_ops['grasp_offset_head'])
+ else:
+ self.inference_ops.append(self.model_ops['offset_bin_pred_vals'])
+
+ return self.model_ops
+
+ def load_weights(self, sess, saver, log_dir, mode='test'):
+ """
+ Load checkpoint weights
+ :param sess: tf.Session
+ :param saver: tf.train.Saver
+ """
+
+ chkpt = tf.train.get_checkpoint_state(log_dir)
+ if chkpt and chkpt.model_checkpoint_path:
+ print(('loading ', chkpt.model_checkpoint_path))
+ saver.restore(sess, chkpt.model_checkpoint_path)
+ else:
+ if mode == 'test':
+ print('no checkpoint in ', log_dir)
+ exit()
+ else:
+ sess.run(tf.global_variables_initializer())
+
+ def filter_segment(self, contact_pts, segment_pc, thres=0.00001):
+ """
+ Filter grasps to obtain contacts on specified point cloud segment
+
+ :param contact_pts: Nx3 contact points of all grasps in the scene
+ :param segment_pc: Mx3 segmented point cloud of the object of interest
+ :param thres: maximum distance in m of filtered contact points from segmented point cloud
+ :returns: Contact/Grasp indices that lie in the point cloud segment
+ """
+ filtered_grasp_idcs = np.array([],dtype=np.int32)
+
+ if contact_pts.shape[0] > 0 and segment_pc.shape[0] > 0:
+ try:
+ dists = contact_pts[:,:3].reshape(-1,1,3) - segment_pc.reshape(1,-1,3)
+ min_dists = np.min(np.linalg.norm(dists,axis=2),axis=1)
+ filtered_grasp_idcs = np.where(min_dists (obj_center - size/2), axis=1) & np.all(full_pc < (obj_center + size/2),axis=1)]
+ if np.any(partial_pc):
+ partial_pc = regularize_pc_point_count(partial_pc, self._contact_grasp_cfg['DATA']['raw_num_points'], use_farthest_point=self._contact_grasp_cfg['DATA']['use_farthest_point'])
+ pc_regions[i] = partial_pc
+ obj_centers[i] = obj_center
+
+ return pc_regions, obj_centers
+
+ def filter_pc_segments(self, pc_segments):
+ for i in pc_segments:
+ pc_segments[i] = reject_median_outliers(pc_segments[i], m=0.4, z_only=False)
+ return pc_segments
+
+
+ def predict_grasps(self, pc, constant_offset=False, convert_cam_coords=True, forward_passes=1):
+ """
+ Predict raw grasps on point cloud
+
+ :param sess: tf.Session
+ :param pc: Nx3 point cloud in camera coordinates
+ :param convert_cam_coords: Convert from OpenCV to internal training camera coordinates (x left, y up, z front) and converts grasps back to openCV coordinates
+ :param constant_offset: do not predict offset and place gripper at constant `extra_opening` distance from contact point
+ :param forward_passes: Number of forward passes to run on each point cloud. default: 1
+ :returns: (pred_grasps_cam, pred_scores, pred_points, gripper_openings) Predicted grasps/scores/contact-points/gripper-openings
+ """
+
+ # Convert point cloud coordinates from OpenCV to internal coordinates (x left, y up, z front)
+ pc, pc_mean = preprocess_pc_for_inference(pc.squeeze(), self._num_input_points, return_mean=True, convert_to_internal_coords=convert_cam_coords)
+
+ if len(pc.shape) == 2:
+ pc_batch = pc[np.newaxis,:,:]
+
+ if forward_passes > 1:
+ pc_batch = np.tile(pc_batch, (forward_passes,1,1))
+
+ pc_batch = torch.from_numpy(pc_batch).type(torch.float32).to(self.device)
+
+ # feed_dict = {self.placeholders['pointclouds_pl']: pc_batch,
+ # self.placeholders['is_training_pl']: False}
+
+ # Run model inference
+ # pred_grasps_cam, pred_scores, pred_points, offset_pred = sess.run(self.inference_ops, feed_dict=feed_dict)
+ pred = self.model(pc_batch)
+ pred_grasps_cam = pred['pred_grasps_cam']
+ pred_scores = pred['pred_scores']
+ pred_points = pred['pred_points']
+ offset_pred = pred['offset_pred']
+
+
+
+ pred_grasps_cam = pred_grasps_cam.detach().cpu().numpy()
+ pred_scores = pred_scores.detach().cpu().numpy()
+ pred_points = pred_points.detach().cpu().numpy()
+ offset_pred = offset_pred.detach().cpu().numpy()
+
+ pred_grasps_cam = pred_grasps_cam.reshape(-1, *pred_grasps_cam.shape[-2:])
+ pred_points = pred_points.reshape(-1, pred_points.shape[-1])
+ pred_scores = pred_scores.reshape(-1)
+ offset_pred = offset_pred.reshape(-1)
+
+ # uncenter grasps
+ pred_grasps_cam[:,:3, 3] += pc_mean.reshape(-1,3)
+ pred_points[:,:3] += pc_mean.reshape(-1,3)
+
+ if constant_offset:
+ offset_pred = np.array([[self._contact_grasp_cfg['DATA']['gripper_width']-self._contact_grasp_cfg['TEST']['extra_opening']]*self._contact_grasp_cfg['DATA']['num_point']])
+
+ gripper_openings = np.minimum(offset_pred + self._contact_grasp_cfg['TEST']['extra_opening'], self._contact_grasp_cfg['DATA']['gripper_width'])
+
+ with_replacement = self._contact_grasp_cfg['TEST']['with_replacement'] if 'with_replacement' in self._contact_grasp_cfg['TEST'] else False
+
+ selection_idcs = self.select_grasps(pred_points[:,:3], pred_scores,
+ self._contact_grasp_cfg['TEST']['max_farthest_points'],
+ self._contact_grasp_cfg['TEST']['num_samples'],
+ self._contact_grasp_cfg['TEST']['first_thres'],
+ self._contact_grasp_cfg['TEST']['second_thres'] if 'second_thres' in self._contact_grasp_cfg['TEST'] else self._contact_grasp_cfg['TEST']['first_thres'],
+ with_replacement=self._contact_grasp_cfg['TEST']['with_replacement'])
+
+ if not np.any(selection_idcs):
+ selection_idcs=np.array([], dtype=np.int32)
+
+ if 'center_to_tip' in self._contact_grasp_cfg['TEST'] and self._contact_grasp_cfg['TEST']['center_to_tip']:
+ pred_grasps_cam[:,:3, 3] -= pred_grasps_cam[:,:3,2]*(self._contact_grasp_cfg['TEST']['center_to_tip']/2)
+
+ # convert back to opencv coordinates
+ if convert_cam_coords:
+ pred_grasps_cam[:,:2, :] *= -1
+ pred_points[:,:2] *= -1
+
+ return pred_grasps_cam[selection_idcs], pred_scores[selection_idcs], pred_points[selection_idcs].squeeze(), gripper_openings[selection_idcs].squeeze()
+
+ def predict_scene_grasps(self, pc_full, pc_segments={}, local_regions=False, filter_grasps=False, forward_passes=1,
+ use_cam_boxes=True):
+ """
+ Predict num_point grasps on a full point cloud or in local box regions around point cloud segments.
+
+ Arguments:
+ sess {tf.Session} -- Tensorflow Session
+ pc_full {np.ndarray} -- Nx3 full scene point cloud
+
+ Keyword Arguments:
+ pc_segments {dict[int, np.ndarray]} -- Dict of Mx3 segmented point clouds of objects of interest (default: {{}})
+ local_regions {bool} -- crop 3D local regions around object segments for prediction (default: {False})
+ filter_grasps {bool} -- filter grasp contacts such that they only lie within object segments (default: {False})
+ forward_passes {int} -- Number of forward passes to run on each point cloud. (default: {1})
+
+ Returns:
+ [np.ndarray, np.ndarray, np.ndarray, np.ndarray] -- pred_grasps_cam, scores, contact_pts, gripper_openings
+ """
+
+ pred_grasps_cam, scores, contact_pts, gripper_openings = {}, {}, {}, {}
+
+ # Predict grasps in local regions or full pc
+ if local_regions:
+ print('using local regions')
+ if use_cam_boxes:
+ pc_regions, _ = self.extract_3d_cam_boxes(pc_full, pc_segments, min_size=0.2)
+ else:
+ pc_regions = self.filter_pc_segments(pc_segments)
+ for k, pc_region in pc_regions.items():
+ pred_grasps_cam[k], scores[k], contact_pts[k], gripper_openings[k] = self.predict_grasps(pc_region, convert_cam_coords=True, forward_passes=forward_passes)
+ else:
+ print('using full pc')
+ pc_full = regularize_pc_point_count(pc_full, self._contact_grasp_cfg['DATA']['raw_num_points'])
+ pred_grasps_cam[-1], scores[-1], contact_pts[-1], gripper_openings[-1] = self.predict_grasps(pc_full, convert_cam_coords=True, forward_passes=forward_passes)
+ print('Generated {} grasps'.format(len(pred_grasps_cam[-1])))
+
+ # Filter grasp contacts to lie within object segment
+ if filter_grasps:
+ segment_keys = contact_pts.keys() if local_regions else pc_segments.keys()
+ for k in segment_keys:
+ j = k if local_regions else -1
+ if np.any(pc_segments[k]) and np.any(contact_pts[j]):
+ segment_idcs = self.filter_segment(contact_pts[j], pc_segments[k], thres=self._contact_grasp_cfg['TEST']['filter_thres'])
+
+ pred_grasps_cam[k] = pred_grasps_cam[j][segment_idcs]
+ scores[k] = scores[j][segment_idcs]
+ contact_pts[k] = contact_pts[j][segment_idcs]
+ try:
+ gripper_openings[k] = gripper_openings[j][segment_idcs]
+ except:
+ print('skipped gripper openings {}'.format(gripper_openings[j]))
+
+ if local_regions and np.any(pred_grasps_cam[k]):
+ print('Generated {} grasps for object {}'.format(len(pred_grasps_cam[k]), k))
+ else:
+ print('skipping obj {} since np.any(pc_segments[k]) {} and np.any(contact_pts[j]) is {}'.format(k, np.any(pc_segments[k]), np.any(contact_pts[j])))
+
+ # if local_regions:
+ # # if not local_regions: # Was not
+ # del pred_grasps_cam[-1], scores[-1], contact_pts[-1], gripper_openings[-1]
+
+ return pred_grasps_cam, scores, contact_pts, gripper_openings
+
+ def select_grasps(self, contact_pts, contact_conf, max_farthest_points = 150, num_grasps = 200, first_thres = 0.25, second_thres = 0.2, with_replacement=False):
+ """
+ Select subset of num_grasps by contact confidence thresholds and farthest contact point sampling.
+
+ 1.) Samples max_farthest_points among grasp contacts with conf > first_thres
+ 2.) Fills up remaining grasp contacts to a maximum of num_grasps with highest confidence contacts with conf > second_thres
+
+ Arguments:
+ contact_pts {np.ndarray} -- num_point x 3 subset of input point cloud for which we have predictions
+ contact_conf {[type]} -- num_point x 1 confidence of the points being a stable grasp contact
+
+ Keyword Arguments:
+ max_farthest_points {int} -- Maximum amount from num_grasps sampled with farthest point sampling (default: {150})
+ num_grasps {int} -- Maximum number of grasp proposals to select (default: {200})
+ first_thres {float} -- first confidence threshold for farthest point sampling (default: {0.6})
+ second_thres {float} -- second confidence threshold for filling up grasp proposals (default: {0.6})
+ with_replacement {bool} -- Return fixed number of num_grasps with conf > first_thres and repeat if there are not enough (default: {False})
+
+ Returns:
+ [np.ndarray] -- Indices of selected contact_pts
+ """
+
+ grasp_conf = contact_conf.squeeze()
+ contact_pts = contact_pts.squeeze()
+
+ conf_idcs_greater_than = np.nonzero(grasp_conf > first_thres)[0]
+ _, center_indexes = farthest_points(contact_pts[conf_idcs_greater_than,:3], np.minimum(max_farthest_points, len(conf_idcs_greater_than)), distance_by_translation_point, return_center_indexes = True)
+
+ remaining_confidences = np.setdiff1d(np.arange(len(grasp_conf)), conf_idcs_greater_than[center_indexes])
+ sorted_confidences = np.argsort(grasp_conf)[::-1]
+ mask = np.in1d(sorted_confidences, remaining_confidences)
+ sorted_remaining_confidence_idcs = sorted_confidences[mask]
+
+ if with_replacement:
+ selection_idcs = list(conf_idcs_greater_than[center_indexes])
+ j=len(selection_idcs)
+ while j < num_grasps and conf_idcs_greater_than.shape[0] > 0:
+ selection_idcs.append(conf_idcs_greater_than[j%len(conf_idcs_greater_than)])
+ j+=1
+ selection_idcs = np.array(selection_idcs)
+
+ else:
+ remaining_idcs = sorted_remaining_confidence_idcs[:num_grasps-len(conf_idcs_greater_than[center_indexes])]
+ remaining_conf_idcs_greater_than = np.nonzero(grasp_conf[remaining_idcs] > second_thres)[0]
+ selection_idcs = np.union1d(conf_idcs_greater_than[center_indexes], remaining_idcs[remaining_conf_idcs_greater_than])
+ return selection_idcs
+
+ def extract_point_clouds(self, depth, K, segmap=None, rgb=None, z_range=[0.2,1.8], segmap_id=0, skip_border_objects=False, margin_px=5):
+ """
+ Converts depth map + intrinsics to point cloud.
+ If segmap is given, also returns segmented point clouds. If rgb is given, also returns pc_colors.
+
+ Arguments:
+ depth {np.ndarray} -- HxW depth map in m
+ K {np.ndarray} -- 3x3 camera Matrix
+
+ Keyword Arguments:
+ segmap {np.ndarray} -- HxW integer array that describes segeents (default: {None})
+ rgb {np.ndarray} -- HxW rgb image (default: {None})
+ z_range {list} -- Clip point cloud at minimum/maximum z distance (default: {[0.2,1.8]})
+ segmap_id {int} -- Only return point cloud segment for the defined id (default: {0})
+ skip_border_objects {bool} -- Skip segments that are at the border of the depth map to avoid artificial edges (default: {False})
+ margin_px {int} -- Pixel margin of skip_border_objects (default: {5})
+
+ Returns:
+ [np.ndarray, dict[int:np.ndarray], np.ndarray] -- Full point cloud, point cloud segments, point cloud colors
+ """
+
+ if K is None:
+ raise ValueError('K is required either as argument --K or from the input numpy file')
+
+ # Convert to pc
+ pc_full, pc_colors = depth2pc(depth, K, rgb)
+
+ # Threshold distance
+ if pc_colors is not None:
+ pc_colors = pc_colors[(pc_full[:,2] < z_range[1]) & (pc_full[:,2] > z_range[0])]
+ pc_full = pc_full[(pc_full[:,2] < z_range[1]) & (pc_full[:,2] > z_range[0])]
+
+ # Extract instance point clouds from segmap and depth map
+ pc_segments = {}
+ if segmap is not None:
+ pc_segments = {}
+ obj_instances = [segmap_id] if segmap_id else np.unique(segmap[segmap>0])
+ for i in obj_instances:
+ if skip_border_objects and not i==segmap_id:
+ obj_i_y, obj_i_x = np.where(segmap==i)
+ if np.any(obj_i_x < margin_px) or np.any(obj_i_x > segmap.shape[1]-margin_px) or np.any(obj_i_y < margin_px) or np.any(obj_i_y > segmap.shape[0]-margin_px):
+ print('object {} not entirely in image bounds, skipping'.format(i))
+ continue
+ inst_mask = segmap==i
+ pc_segment,_ = depth2pc(depth*inst_mask, K)
+ pc_segments[i] = pc_segment[(pc_segment[:,2] < z_range[1]) & (pc_segment[:,2] > z_range[0])] #regularize_pc_point_count(pc_segment, grasp_estimator._contact_grasp_cfg['DATA']['num_point'])
+
+ return pc_full, pc_segments, pc_colors
+
+ def predict_scene_grasps_from_depth_K_and_2d_seg(self, sess, depth, segmap, K, z_range=[0.2,1.8], local_regions=False, filter_grasps=False, segmap_id=0, skip_border_objects=False, margin_px=5, rgb=None, forward_passes=1):
+ """ Combines converting to point cloud(s) and predicting scene grasps into one function """
+
+ pc_full, pc_segments = self.extract_point_clouds(depth, K, segmap=segmap, segmap_id=segmap_id, skip_border_objects=skip_border_objects, margin_px=margin_px, z_range=z_range, rgb=rgb)
+
+ return self.predict_scene_grasps(sess, pc_full, pc_segments, local_regions=local_regions, filter_grasps=filter_grasps, forward_passes=forward_passes)
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/contact_graspnet.py b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/contact_graspnet.py
new file mode 100644
index 0000000000..9f5699ab71
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/contact_graspnet.py
@@ -0,0 +1,750 @@
+import os
+import sys
+import numpy as np
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+
+# Import pointnet library
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+ROOT_DIR = os.path.dirname(os.path.dirname(BASE_DIR))
+
+sys.path.append(os.path.join(BASE_DIR))
+sys.path.append(os.path.join(BASE_DIR, 'Pointnet_Pointnet2_pytorch'))
+# sys.path.append(os.path.join(BASE_DIR, 'pointnet2', 'utils'))
+
+# from tf_sampling import farthest_point_sample, gather_point
+# from tf_grouping import query_ball_point, group_point, knn_point
+
+from models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetSetAbstraction, PointNetFeaturePropagation
+from contact_graspnet_pytorch import mesh_utils, utils
+
+class ContactGraspnet(nn.Module):
+ def __init__(self, global_config, device):
+ super(ContactGraspnet, self).__init__()
+
+ self.device = device
+ # self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
+
+ # -- Extract config -- #
+ self.global_config = global_config
+ self.model_config = global_config['MODEL']
+ self.data_config = global_config['DATA']
+
+ radius_list_0 = self.model_config['pointnet_sa_modules_msg'][0]['radius_list']
+ radius_list_1 = self.model_config['pointnet_sa_modules_msg'][1]['radius_list']
+ radius_list_2 = self.model_config['pointnet_sa_modules_msg'][2]['radius_list']
+
+ nsample_list_0 = self.model_config['pointnet_sa_modules_msg'][0]['nsample_list']
+ nsample_list_1 = self.model_config['pointnet_sa_modules_msg'][1]['nsample_list']
+ nsample_list_2 = self.model_config['pointnet_sa_modules_msg'][2]['nsample_list']
+
+ mlp_list_0 = self.model_config['pointnet_sa_modules_msg'][0]['mlp_list'] # list of lists
+ mlp_list_1 = self.model_config['pointnet_sa_modules_msg'][1]['mlp_list']
+ mlp_list_2 = self.model_config['pointnet_sa_modules_msg'][2]['mlp_list']
+
+ npoint_0 = self.model_config['pointnet_sa_modules_msg'][0]['npoint']
+ npoint_1 = self.model_config['pointnet_sa_modules_msg'][1]['npoint']
+ npoint_2 = self.model_config['pointnet_sa_modules_msg'][2]['npoint']
+
+ fp_mlp_0 = self.model_config['pointnet_fp_modules'][0]['mlp']
+ fp_mlp_1 = self.model_config['pointnet_fp_modules'][1]['mlp']
+ fp_mlp_2 = self.model_config['pointnet_fp_modules'][2]['mlp']
+
+ sa_mlp = self.model_config['pointnet_sa_module']['mlp']
+ sa_group_all = self.model_config['pointnet_sa_module']['group_all']
+
+ self.input_normals = self.data_config['input_normals']
+ self.offset_bins = self.data_config['labels']['offset_bins']
+ self.joint_heads = self.model_config['joint_heads']
+
+ # For adding additional features
+ additional_channel = 3 if self.input_normals else 0
+
+ if 'asymmetric_model' in self.model_config and self.model_config['asymmetric_model']:
+ # It looks like there are xyz_points and "points". The points are normals I think?
+ self.sa1 = PointNetSetAbstractionMsg(npoint=npoint_0,
+ radius_list=radius_list_0,
+ nsample_list=nsample_list_0,
+ in_channel = 3 + additional_channel,
+ mlp_list=mlp_list_0)
+
+ # Sum the size of last layer of each mlp in mlp_list
+ sa1_out_channels = sum([mlp_list_0[i][-1] for i in range(len(mlp_list_0))])
+ self.sa2 = PointNetSetAbstractionMsg(npoint=npoint_1,
+ radius_list=radius_list_1,
+ nsample_list=nsample_list_1,
+ in_channel=sa1_out_channels,
+ mlp_list=mlp_list_1)
+
+ sa2_out_channels = sum([mlp_list_1[i][-1] for i in range(len(mlp_list_1))])
+ self.sa3 = PointNetSetAbstractionMsg(npoint=npoint_2,
+ radius_list=radius_list_2,
+ nsample_list=nsample_list_2,
+ in_channel=sa2_out_channels,
+ mlp_list=mlp_list_2)
+
+ sa3_out_channels = sum([mlp_list_2[i][-1] for i in range(len(mlp_list_2))])
+ self.sa4 = PointNetSetAbstraction(npoint=None,
+ radius=None,
+ nsample=None,
+ in_channel=3 + sa3_out_channels,
+ mlp=sa_mlp,
+ group_all=sa_group_all)
+
+ self.fp3 = PointNetFeaturePropagation(in_channel=sa_mlp[-1] + sa3_out_channels,
+ mlp=fp_mlp_2)
+
+ self.fp2 = PointNetFeaturePropagation(in_channel=fp_mlp_2[-1] + sa2_out_channels,
+ mlp=fp_mlp_1)
+
+ self.fp1 = PointNetFeaturePropagation(in_channel=fp_mlp_1[-1] + sa1_out_channels,
+ mlp=fp_mlp_0)
+ else:
+ raise NotImplementedError
+
+ if self.joint_heads:
+ raise NotImplementedError
+ else:
+ # Theres prob some bugs here
+
+ # Head for grasp direction
+ self.grasp_dir_head = nn.Sequential(
+ nn.Conv1d(fp_mlp_0[-1], 128, 1, padding=0),
+ nn.BatchNorm1d(128),
+ nn.ReLU(),
+ nn.Dropout(0.3), # p = 1 - keep_prob (tf is inverse of torch)
+ nn.Conv1d(128, 3, 1, padding=0)
+ )
+
+ # Remember to normalize the output of this head
+
+ # Head for grasp approach
+ self.grasp_approach_head = nn.Sequential(
+ nn.Conv1d(fp_mlp_0[-1], 128, 1, padding=0),
+ nn.BatchNorm1d(128),
+ nn.ReLU(),
+ nn.Dropout(0.3),
+ nn.Conv1d(128, 3, 1, padding=0)
+ )
+
+ # Head for grasp width
+ if self.model_config['dir_vec_length_offset']:
+ raise NotImplementedError
+ elif self.model_config['bin_offsets']:
+ self.grasp_offset_head = nn.Sequential(
+ nn.Conv1d(fp_mlp_0[-1], 128, 1, padding=0),
+ nn.BatchNorm1d(128),
+ nn.ReLU(),
+ nn.Conv1d(128, len(self.offset_bins) - 1, 1, padding=0)
+ )
+ else:
+ raise NotImplementedError
+
+ # Head for contact points
+ self.binary_seg_head = nn.Sequential(
+ nn.Conv1d(fp_mlp_0[-1], 128, 1, padding=0),
+ nn.BatchNorm1d(128),
+ nn.ReLU(),
+ nn.Dropout(0.5), # 0.5 in original code
+ nn.Conv1d(128, 1, 1, padding=0)
+ )
+
+ def forward(self, point_cloud):
+
+ # expensive, rather use random only
+ if 'raw_num_points' in self.data_config and self.data_config['raw_num_points'] != self.data_config['ndataset_points']:
+ raise NotImplementedError
+ point_cloud = gather_point(point_cloud, farthest_point_sample(data_config['ndataset_points'], point_cloud))
+
+ # Convert from tf to torch ordering
+ point_cloud = torch.transpose(point_cloud, 1, 2) # Now we have batch x channels (3 or 6) x num_points
+
+
+ l0_xyz = point_cloud[:, :3, :]
+ l0_points = point_cloud[:, 3:6, :] if self.input_normals else l0_xyz.clone()
+
+ # -- PointNet Backbone -- #
+ # Set Abstraction Layers
+ l1_xyz, l1_points = self.sa1(l0_xyz, l0_points)
+ l2_xyz, l2_points = self.sa2(l1_xyz, l1_points)
+ l3_xyz, l3_points = self.sa3(l2_xyz, l2_points)
+ l4_xyz, l4_points = self.sa4(l3_xyz, l3_points)
+
+ # Feature Propagation Layers
+ l3_points = self.fp3(l3_xyz, l4_xyz, l3_points, l4_points)
+ l2_points = self.fp2(l2_xyz, l3_xyz, l2_points, l3_points)
+ l1_points = self.fp1(l1_xyz, l2_xyz, l1_points, l2_points)
+
+ l0_points = l1_points
+ pred_points = l1_xyz
+
+ # -- Heads -- #
+ # Grasp Direction Head
+ grasp_dir_head = self.grasp_dir_head(l0_points)
+ grasp_dir_head_normed = F.normalize(grasp_dir_head, p=2, dim=1) # normalize along channels
+
+ # Grasp Approach Head
+ approach_dir_head = self.grasp_approach_head(l0_points)
+
+ # computer gram schmidt orthonormalization
+ dot_product = torch.sum(grasp_dir_head_normed * approach_dir_head, dim=1, keepdim=True)
+ projection = dot_product * grasp_dir_head_normed
+ approach_dir_head_orthog = F.normalize(approach_dir_head - projection, p=2, dim=1)
+
+ # Grasp Width Head
+ if self.model_config['dir_vec_length_offset']:
+ raise NotImplementedError
+ grasp_offset_head = F.normalize(grasp_dir_head, p=2, dim=1)
+ elif self.model_config['bin_offsets']:
+ grasp_offset_head = self.grasp_offset_head(l0_points)
+
+ # Binary Segmentation Head
+ binary_seg_head = self.binary_seg_head(l0_points)
+
+ # -- Create end_points -- #
+ # I don't think this is needed anymore
+ # end_points = {}
+ # end_points['grasp_dir_head'] = grasp_dir_head
+ # end_points['binary_seg_head'] = binary_seg_head
+ # end_points['binary_seg_pred'] = torch.sigmoid(binary_seg_head)
+ # end_points['grasp_offset_head'] = grasp_offset_head
+ # if self.model_config['bin_offsets']:
+ # end_points['grasp_offset_pred'] = torch.sigmoid(grasp_offset_head)
+ # else:
+ # end_points['grasp_offset_pred'] = grasp_offset_head
+ # end_points['approach_dir_head'] = approach_dir_head_orthog
+ # end_points['pred_points'] = pred_points
+
+ # # Get back to tf ordering
+ # for k, points in end_points.items():
+ # end_points[k] = points.permute(0, 2, 1)
+
+ # Expected outputs:
+ # pred_grasps_cam, pred_scores, pred_points, offset_pred = self.model(pc_batch)
+
+ # -- Construct Output -- #
+ # Get 6 DoF grasp pose
+ torch_bin_vals = self.get_bin_vals()
+
+ # PyTorch equivalent of tf.gather_nd with conditional
+ # I think the output should be B x N
+ if self.model_config['bin_offsets']:
+ argmax_indices = torch.argmax(grasp_offset_head, dim=1, keepdim=True)
+ offset_bin_pred_vals = torch_bin_vals[argmax_indices] # kinda sketch but works?
+ # expand_dims_indices = argmax_indices.unsqueeze(1)
+ # offset_bin_pred_vals = torch.gather(torch_bin_vals, 1, argmax_indices)
+ else:
+ offset_bin_pred_vals = grasp_offset_head[:, 0, :]
+
+ # TODO Start here
+ pred_grasps_cam = self.build_6d_grasp(approach_dir_head_orthog.permute(0, 2, 1),
+ grasp_dir_head_normed.permute(0, 2, 1),
+ pred_points.permute(0, 2, 1),
+ offset_bin_pred_vals.permute(0, 2, 1),
+ use_torch=True) # B x N x 4 x 4
+
+ # Get pred scores
+ pred_scores = torch.sigmoid(binary_seg_head).permute(0, 2, 1)
+
+ # Get pred points
+ pred_points = pred_points.permute(0, 2, 1)
+
+ # Get pred offsets
+ offset_pred = offset_bin_pred_vals
+
+ # -- Values to compute loss on -- #
+ # intermediates = {}
+ # intermediates['grasp_offset_head'] = grasp_offset_head
+
+ pred = dict(
+ pred_grasps_cam=pred_grasps_cam,
+ pred_scores=pred_scores,
+ pred_points=pred_points,
+ offset_pred=offset_pred,
+ grasp_offset_head=grasp_offset_head # For loss
+ )
+
+ # return pred_grasps_cam, pred_scores, pred_points, offset_pred, intermediates
+ return pred
+
+
+ def get_bin_vals(self):
+ """
+ Creates bin values for grasping widths according to bounds defined in config
+
+ Arguments:
+ global_config {dict} -- config
+
+ Returns:
+ torch.tensor -- bin value tensor
+ """
+ bins_bounds = np.array(self.data_config['labels']['offset_bins'])
+ if self.global_config['TEST']['bin_vals'] == 'max':
+ bin_vals = (bins_bounds[1:] + bins_bounds[:-1])/2
+ bin_vals[-1] = bins_bounds[-1]
+ elif self.global_config['TEST']['bin_vals'] == 'mean':
+ bin_vals = bins_bounds[1:]
+ else:
+ raise NotImplementedError
+
+ if not self.global_config['TEST']['allow_zero_margin']:
+ bin_vals = np.minimum(bin_vals, self.global_config['DATA']['gripper_width'] \
+ -self.global_config['TEST']['extra_opening'])
+
+ bin_vals = torch.tensor(bin_vals, dtype=torch.float32).to(self.device)
+ return bin_vals
+
+
+
+ # def build_6d_grasp(self, approach_dirs, base_dirs, contact_pts, thickness, use_tf=False, gripper_depth = 0.1034):
+ def build_6d_grasp(self, approach_dirs, base_dirs, contact_pts, thickness, use_torch=False, gripper_depth = 0.1034):
+ """
+ Build 6-DoF grasps + width from point-wise network predictions
+
+ Arguments:
+ approach_dirs {np.ndarray/tf.tensor} -- Nx3 approach direction vectors
+ base_dirs {np.ndarray/tf.tensor} -- Nx3 base direction vectors
+ contact_pts {np.ndarray/tf.tensor} -- Nx3 contact points
+ thickness {np.ndarray/tf.tensor} -- Nx1 grasp width
+
+ Keyword Arguments:
+ use_tf {bool} -- whether inputs and outputs are tf tensors (default: {False})
+ gripper_depth {float} -- distance from gripper coordinate frame to gripper baseline in m (default: {0.1034})
+
+ Returns:
+ np.ndarray -- Nx4x4 grasp poses in camera coordinates
+ """
+ # We are trying to build a stack of 4x4 homogeneous transform matricies of size B x N x 4 x 4.
+ # To do so, we calculate the rotation and translation portions according to the paper.
+ # This gives us positions as shown:
+ # [ R R R T ]
+ # [ R R R T ]
+ # [ R R R T ]
+ # [ 0 0 0 1 ] Note that the ^ dim is 2 and the --> dim is 3
+ # We need to pad with zeros and ones to get the final shape so we generate
+ # ones and zeros and stack them.
+ if use_torch:
+ grasp_R = torch.stack([base_dirs, torch.cross(approach_dirs,base_dirs),approach_dirs], dim=3) # B x N x 3 x 3
+ grasp_t = contact_pts + (thickness / 2) * base_dirs - gripper_depth * approach_dirs # B x N x 3
+ grasp_t = grasp_t.unsqueeze(3) # B x N x 3 x 1
+ ones = torch.ones((contact_pts.shape[0], contact_pts.shape[1], 1, 1), dtype=torch.float32).to(self.device) # B x N x 1 x 1
+ zeros = torch.zeros((contact_pts.shape[0], contact_pts.shape[1], 1, 3), dtype=torch.float32).to(self.device) # B x N x 1 x 3
+ homog_vec = torch.cat([zeros, ones], dim=3) # B x N x 1 x 4
+ grasps = torch.cat([torch.cat([grasp_R, grasp_t], dim=3), homog_vec], dim=2) # B x N x 4 x 4
+
+ else:
+ grasps = []
+ for i in range(len(contact_pts)):
+ grasp = np.eye(4)
+
+ grasp[:3,0] = base_dirs[i] / np.linalg.norm(base_dirs[i])
+ grasp[:3,2] = approach_dirs[i] / np.linalg.norm(approach_dirs[i])
+ grasp_y = np.cross( grasp[:3,2],grasp[:3,0])
+ grasp[:3,1] = grasp_y / np.linalg.norm(grasp_y)
+ # base_gripper xyz = contact + thickness / 2 * baseline_dir - gripper_d * approach_dir
+ grasp[:3,3] = contact_pts[i] + thickness[i] / 2 * grasp[:3,0] - gripper_depth * grasp[:3,2]
+ # grasp[0,3] = finger_width
+ grasps.append(grasp)
+ grasps = np.array(grasps)
+
+ return grasps
+
+class ContactGraspnetLoss(nn.Module):
+ def __init__(self, global_config, device):
+ super(ContactGraspnetLoss, self).__init__()
+ self.global_config = global_config
+
+ # -- Process config -- #
+ config_losses = [
+ 'pred_contact_base',
+ 'pred_contact_success', # True
+ 'pred_contact_offset', # True
+ 'pred_contact_approach',
+ 'pred_grasps_adds', # True
+ 'pred_grasps_adds_gt2pred'
+ ]
+
+ config_weights = [
+ 'dir_cosine_loss_weight',
+ 'score_ce_loss_weight', # True
+ 'offset_loss_weight', # True
+ 'approach_cosine_loss_weight',
+ 'adds_loss_weight', # True
+ 'adds_gt2pred_loss_weight'
+ ]
+
+ self.device = device
+
+ bin_weights = global_config['DATA']['labels']['bin_weights']
+ self.bin_weights = torch.tensor(bin_weights).to(self.device)
+ self.bin_vals = self._get_bin_vals().to(self.device)
+
+ for config_loss, config_weight in zip(config_losses, config_weights):
+ if global_config['MODEL'][config_loss]:
+ setattr(self, config_weight, global_config['OPTIMIZER'][config_weight])
+ else:
+ setattr(self, config_weight, 0.0)
+
+ self.gripper = mesh_utils.create_gripper('panda')
+ # gripper_control_points = self.gripper.get_control_point_tensor(self.global_config['OPTIMIZER']['batch_size']) # b x 5 x 3
+ # sym_gripper_control_points = self.gripper.get_control_point_tensor(self.global_config['OPTIMIZER']['batch_size'], symmetric=True)
+
+ # self.gripper_control_points_homog = torch.cat([gripper_control_points,
+ # torch.ones((self.global_config['OPTIMIZER']['batch_size'], gripper_control_points.shape[1], 1))], dim=2) # b x 5 x 4
+ # self.sym_gripper_control_points_homog = torch.cat([sym_gripper_control_points,
+ # torch.ones((self.global_config['OPTIMIZER']['batch_size'], gripper_control_points.shape[1], 1))], dim=2) # b x 5 x 4
+
+ n_copies = 1 # We will repeat this according to the batch size
+ gripper_control_points = self.gripper.get_control_point_tensor(n_copies) # b x 5 x 3
+ sym_gripper_control_points = self.gripper.get_control_point_tensor(n_copies, symmetric=True)
+
+ self.gripper_control_points_homog = torch.cat([gripper_control_points,
+ torch.ones((n_copies, gripper_control_points.shape[1], 1))], dim=2) # b x 5 x 4
+ self.sym_gripper_control_points_homog = torch.cat([sym_gripper_control_points,
+ torch.ones((n_copies, gripper_control_points.shape[1], 1))], dim=2) # b x 5 x 4
+
+ self.gripper_control_points_homog = self.gripper_control_points_homog.to(self.device)
+ self.sym_gripper_control_points_homog = self.sym_gripper_control_points_homog.to(self.device)
+
+
+ def forward(self, pred, target):
+ """
+ Computes loss terms from pointclouds, network predictions and labels
+
+ Arguments:
+ pointclouds_pl {tf.placeholder} -- bxNx3 input point clouds
+ end_points {dict[str:tf.variable]} -- endpoints of the network containing predictions
+ dir_labels_pc_cam {tf.variable} -- base direction labels in camera coordinates (bxNx3)
+ offset_labels_pc {tf.variable} -- grasp width labels (bxNx1)
+ grasp_success_labels_pc {tf.variable} -- contact success labels (bxNx1)
+ approach_labels_pc_cam {tf.variable} -- approach direction labels in camera coordinates (bxNx3)
+ global_config {dict} -- config dict
+
+ Returns:
+ [dir_cosine_loss, bin_ce_loss, offset_loss, approach_cosine_loss, adds_loss,
+ adds_loss_gt2pred, gt_control_points, pred_control_points, pos_grasps_in_view] -- All losses (not all are used for training)
+ """
+ pred_grasps_cam = pred['pred_grasps_cam'] # B x N x 4 x 4
+ pred_scores = pred['pred_scores'] # B x N x 1
+ pred_points = pred['pred_points'] # B x N x 3
+ # offset_pred = pred['offset_pred'] # B x N # We use the grasp_offset_head instead of this
+ grasp_offset_head = pred['grasp_offset_head'].permute(0, 2, 1) # B x N x 10
+
+
+ # # Generated in acronym_dataloader.py
+ # grasp_success_labels_pc = target['grasp_success_label'] # B x N
+ # grasp_offset_labels_pc = target['grasp_diff_label'] # B x N x 3
+
+ # approach_labels_pc_cam = target['grasp_approach_label'] # B x N x 3
+ # dir_labels_pc_cam = target['grasp_dir_label'] # B x N x 3
+ # pointclouds_pl = target['pc_cam'] # B x N x 3
+
+ # -- Interpolate Labels -- #
+ pos_contact_points = target['pos_contact_points'] # B x M x 3
+ pos_contact_dirs = target['pos_contact_dirs'] # B x M x 3
+ pos_finger_diffs = target['pos_finger_diffs'] # B x M
+ pos_approach_dirs = target['pos_approach_dirs'] # B x M x 3
+ camera_pose = target['camera_pose'] # B x 4 x 4
+
+ dir_labels_pc_cam, \
+ grasp_offset_labels_pc, \
+ grasp_success_labels_pc, \
+ approach_labels_pc_cam, \
+ debug = self._compute_labels(pred_points,
+ camera_pose,
+ pos_contact_points,
+ pos_contact_dirs,
+ pos_finger_diffs,
+ pos_approach_dirs)
+
+ # I think this is the number of positive grasps that are in view
+ min_geom_loss_divisor = float(self.global_config['LOSS']['min_geom_loss_divisor']) # This is 1.0
+ pos_grasps_in_view = torch.clamp(grasp_success_labels_pc.sum(dim=1), min=min_geom_loss_divisor) # B
+ # pos_grasps_in_view = torch.maximum(grasp_success_labels_pc.sum(dim=1), min_geom_loss_divisor) # B
+
+ total_loss = 0.0
+
+ if self.dir_cosine_loss_weight > 0:
+ raise NotImplementedError
+
+ # -- Grasp Confidence Loss -- #
+ if self.score_ce_loss_weight > 0: # TODO (bin_ce_loss)
+ bin_ce_loss = F.binary_cross_entropy(pred_scores, grasp_success_labels_pc, reduction='none') # B x N x 1
+ if 'topk_confidence' in self.global_config['LOSS'] \
+ and self.global_config['LOSS']['topk_confidence']:
+ bin_ce_loss, _ = torch.topk(bin_ce_loss.squeeze(), k=self.global_config['LOSS']['topk_confidence'])
+ bin_ce_loss = torch.mean(bin_ce_loss)
+
+ total_loss += self.score_ce_loss_weight * bin_ce_loss
+
+ # -- Grasp Offset / Thickness Loss -- #
+ if self.offset_loss_weight > 0: # TODO (offset_loss)
+ if self.global_config['MODEL']['bin_offsets']:
+ # Convert labels to multihot
+ bin_vals = self.global_config['DATA']['labels']['offset_bins']
+ grasp_offset_labels_multihot = self._bin_label_to_multihot(grasp_offset_labels_pc,
+ bin_vals)
+
+ if self.global_config['LOSS']['offset_loss_type'] == 'softmax_cross_entropy':
+ raise NotImplementedError
+
+ else:
+ offset_loss = F.binary_cross_entropy_with_logits(grasp_offset_head,
+ grasp_offset_labels_multihot, reduction='none') # B x N x 1
+ if 'too_small_offset_pred_bin_factor' in self.global_config['LOSS'] \
+ and self.global_config['LOSS']['too_small_offset_pred_bin_factor']:
+ raise NotImplementedError
+
+ # Weight loss for each bin
+ shaped_bin_weights = self.bin_weights[None, None, :]
+ offset_loss = (shaped_bin_weights * offset_loss).mean(axis=2)
+ else:
+ raise NotImplementedError
+ masked_offset_loss = offset_loss * grasp_success_labels_pc.squeeze()
+ # Divide each batch by the number of successful grasps in the batch
+ offset_loss = torch.mean(torch.sum(masked_offset_loss, axis=1, keepdim=True) / pos_grasps_in_view)
+
+ total_loss += self.offset_loss_weight * offset_loss
+
+ if self.approach_cosine_loss_weight > 0:
+ raise NotImplementedError
+
+ # -- 6 Dof Pose Loss -- #
+ if self.adds_loss_weight > 0: # TODO (adds_loss)
+ # Build groudn truth grasps and compare distances to predicted grasps
+
+ ### ADS Gripper PC Loss
+ # Get 6 DoF pose of predicted grasp
+ if self.global_config['MODEL']['bin_offsets']:
+ thickness_gt = self.bin_vals[torch.argmax(grasp_offset_labels_pc, dim=2)]
+ else:
+ thickness_gt = grasp_offset_labels_pc[:, :, 0]
+
+ # TODO: Move this to dataloader?
+ pred_grasps = pred_grasps_cam # B x N x 4 x 4
+ gt_grasps_proj = utils.build_6d_grasp(approach_labels_pc_cam,
+ dir_labels_pc_cam,
+ pred_points,
+ thickness_gt,
+ use_torch=True,
+ device=self.device) # b x N x 4 x 4
+ # Select positive grasps I think?
+ success_mask = grasp_success_labels_pc.bool()[:, :, :, None] # B x N x 1 x 1
+ success_mask = torch.broadcast_to(success_mask, gt_grasps_proj.shape) # B x N x 4 x 4
+ pos_gt_grasps_proj = torch.where(success_mask, gt_grasps_proj, torch.ones_like(gt_grasps_proj) * 100000) # B x N x 4 x 4
+
+ # Expand gripper control points to match number of points
+ # only use per point pred grasps but not per point gt grasps
+ control_points = self.gripper_control_points_homog.unsqueeze(1) # 1 x 1 x 5 x 4
+ control_points = control_points.repeat(pred_points.shape[0], pred_points.shape[1], 1, 1) # b x N x 5 x 4
+
+ sym_control_points = self.sym_gripper_control_points_homog.unsqueeze(1) # 1 x 1 x 5 x 4
+ sym_control_points = sym_control_points.repeat(pred_points.shape[0], pred_points.shape[1], 1, 1) # b x N x 5 x 4
+
+ pred_control_points = torch.matmul(control_points, pred_grasps.permute(0, 1, 3, 2))[:, :, :, :3] # b x N x 5 x 3
+
+ # Transform control points to ground truth locations
+ gt_control_points = torch.matmul(control_points, pos_gt_grasps_proj.permute(0, 1, 3, 2))[:, :, :, :3] # b x N x 5 x 3
+ sym_gt_control_points = torch.matmul(sym_control_points, pos_gt_grasps_proj.permute(0, 1, 3, 2))[:, :, :, :3] # b x N x 5 x 3
+
+ # Compute distances between predicted and ground truth control points
+ expanded_pred_control_points = pred_control_points.unsqueeze(2) # B x N x 1 x 5 x 3
+ expanded_gt_control_points = gt_control_points.unsqueeze(1) # B x 1 x N' x 5 x 3 I think N' == N
+ expanded_sym_gt_control_points = sym_gt_control_points.unsqueeze(1) # B x 1 x N' x 5 x 3 I think N' == N
+
+ # Sum of squared distances between all points
+ squared_add = torch.sum((expanded_pred_control_points - expanded_gt_control_points)**2, dim=(3, 4)) # B x N x N'
+ sym_squared_add = torch.sum((expanded_pred_control_points - expanded_sym_gt_control_points)**2, dim=(3, 4)) # B x N x N'
+
+ # Combine distances between gt and symmetric gt grasps
+ squared_adds = torch.concat([squared_add, sym_squared_add], dim=2) # B x N x 2N'
+
+ # Take min distance to gt grasp for each predicted grasp
+ squared_adds_k = torch.topk(squared_adds, k=1, dim=2, largest=False)[0] # B x N
+
+ # Mask negative grasps
+ # TODO: If there are bugs, its prob here. The original code sums on axis=1
+ # Which just determines if there is a successful grasp in the batch.
+ # I think we just want to select the positive grasps so the sum is redundant.
+ sum_grasp_success_labels = torch.sum(grasp_success_labels_pc, dim=2, keepdim=True)
+ binary_grasp_success_labels = torch.clamp(sum_grasp_success_labels, 0, 1)
+ min_adds = binary_grasp_success_labels * torch.sqrt(squared_adds_k) # B x N x 1
+ adds_loss = torch.sum(pred_scores * min_adds, dim=(1), keepdim=True) # B x 1
+ adds_loss = adds_loss.squeeze() / pos_grasps_in_view.squeeze() # B x 1
+ adds_loss = torch.mean(adds_loss)
+ total_loss += self.adds_loss_weight * adds_loss
+
+ if self.adds_gt2pred_loss_weight > 0:
+ raise NotImplementedError
+
+ loss_info = {
+ 'bin_ce_loss': bin_ce_loss, # Grasp success loss
+ 'offset_loss': offset_loss, # Grasp width loss
+ 'adds_loss': adds_loss, # Pose loss
+ }
+
+ return total_loss, loss_info
+
+ def _get_bin_vals(self):
+ """
+ Creates bin values for grasping widths according to bounds defined in config
+
+ Arguments:
+ global_config {dict} -- config
+
+ Returns:
+ tf.constant -- bin value tensor
+ """
+ bins_bounds = np.array(self.global_config['DATA']['labels']['offset_bins'])
+ if self.global_config['TEST']['bin_vals'] == 'max':
+ bin_vals = (bins_bounds[1:] + bins_bounds[:-1])/2
+ bin_vals[-1] = bins_bounds[-1]
+ elif self.global_config['TEST']['bin_vals'] == 'mean':
+ bin_vals = bins_bounds[1:]
+ else:
+ raise NotImplementedError
+
+ if not self.global_config['TEST']['allow_zero_margin']:
+ bin_vals = np.minimum(bin_vals, self.global_config['DATA']['gripper_width']-self.global_config['TEST']['extra_opening'])
+
+ bin_vals = torch.tensor(bin_vals, dtype=torch.float32)
+ return bin_vals
+
+ def _bin_label_to_multihot(self, cont_labels, bin_boundaries):
+ """
+ Computes binned grasp width labels from continuous labels and bin boundaries
+
+ Arguments:
+ cont_labels {torch.Tensor} -- continuous labels
+ bin_boundaries {list} -- bin boundary values
+
+ Returns:
+ torch.Tensor -- one/multi hot bin labels
+ """
+ bins = []
+ for b in range(len(bin_boundaries)-1):
+ bins.append(torch.logical_and(torch.greater_equal(cont_labels, bin_boundaries[b]), torch.less(cont_labels, bin_boundaries[b+1])))
+ multi_hot_labels = torch.cat(bins, dim=2)
+ multi_hot_labels = multi_hot_labels.to(torch.float32)
+
+ return multi_hot_labels
+
+
+ def _compute_labels(self,
+ processed_pc_cams: torch.Tensor,
+ camera_poses: torch.Tensor,
+ pos_contact_points: torch.Tensor,
+ pos_contact_dirs: torch.Tensor,
+ pos_finger_diffs: torch.Tensor,
+ pos_approach_dirs: torch.Tensor):
+ """
+ Project grasp labels defined on meshes onto rendered point cloud
+ from a camera pose via nearest neighbor contacts within a maximum radius.
+ All points without nearby successful grasp contacts are considered
+ negative contact points.
+
+ Here N is the number of points returned by the PointNet Encoder (2048) while
+ M is the number of points in the ground truth data. B is the batch size.
+ We are trying to assign a label to each of the PointNet points by
+ sampling the nearest ground truth points.
+
+ Arguments:
+ pc_cam_pl (torch.Tensor): (B, N, 3) point cloud in camera frame
+ camera_pose_pl (torch.Tensor): (B, 4, 4) homogenous camera pose
+ pos_contact_points (torch.Tensor): (B, M, 3) contact points in world frame (3 DoF points)
+ pos_contact_dirs (torch.Tensor): (B, M, 3) contact directions (origin centered vectors?)
+ pos_finger_diffs (torch.Tensor): (B, M, ) finger diffs in world frame (scalar distances)
+ pos_approach_dirs (torch.Tensor): (B, M, 3) approach directions in world frame (origin centered vectors?)
+ """
+ label_config = self.global_config['DATA']['labels']
+
+ nsample = label_config['k'] # Currently set to 1
+ radius = label_config['max_radius']
+ filter_z = label_config['filter_z']
+ z_val = label_config['z_val']
+
+ _, N, _ = processed_pc_cams.shape
+ B, M, _ = pos_contact_points.shape
+
+ # -- Make sure pcd is B x N x 3 -- #
+ if processed_pc_cams.shape[2] != 3:
+ xyz_cam = processed_pc_cams[:,:,:3] # N x 3
+ else:
+ xyz_cam = processed_pc_cams
+
+ # -- Transform Ground Truth to Camera Frame -- #
+ # Transform contact points to camera frame (This is a homogenous transform)
+ # We use matmul to accommodate batch
+ # pos_contact_points_cam = pos_contact_points @ (camera_poses[:3,:3].T) + camera_poses[:3,3][None,:]
+ pos_contact_points_cam = torch.matmul(pos_contact_points, camera_poses[:, :3, :3].transpose(1, 2)) \
+ + camera_poses[:,:3,3][:, None,:]
+
+ # Transform contact directions to camera frame (Don't translate because its a direction vector)
+ # pos_contact_dirs_cam = pos_contact_dirs @ camera_poses[:3,:3].T
+ pos_contact_dirs_cam = torch.matmul(pos_contact_dirs, camera_poses[:, :3,:3].transpose(1, 2))
+
+ # Make finger diffs B x M x 1
+ pos_finger_diffs = pos_finger_diffs[:, :, None]
+
+ # Transform approach directions to camera frame (Don't translate because its a direction vector)
+ # pos_approach_dirs_cam = pos_approach_dirs @ camera_poses[:3,:3].T
+ pos_approach_dirs_cam = torch.matmul(pos_approach_dirs, camera_poses[:, :3,:3].transpose(1, 2))
+
+ # -- Filter Direction -- #
+ # TODO: Figure out what is going on here
+ if filter_z:
+ # Filter out directions that are too far
+ dir_filter_passed = (pos_contact_dirs_cam[:, :, 2:3] > z_val).repeat(1, 1, 3)
+ pos_contact_points_cam = torch.where(dir_filter_passed,
+ pos_contact_points_cam,
+ torch.ones_like(pos_contact_points_cam) * 10000)
+
+ # -- Compute Distances -- #
+ # We want to compute the distance between each point in the point cloud and each contact point
+ # We can do this by expanding the dimensions of the tensors and then summing the squared differences
+ xyz_cam_expanded = torch.unsqueeze(xyz_cam, 2) # B x N x 1 x 3
+ pos_contact_points_cam_expanded = torch.unsqueeze(pos_contact_points_cam, 1) # B x 1 x M x 3
+ squared_dists_all = torch.sum((xyz_cam_expanded - pos_contact_points_cam_expanded)**2, dim=3) # B x N x M
+
+ # B x N x k, B x N x k
+ squared_dists_k, close_contact_pt_idcs = torch.topk(squared_dists_all,
+ k=nsample, dim=2, largest=False, sorted=False)
+
+ # -- Group labels -- #
+ grouped_contact_dirs_cam = utils.index_points(pos_contact_dirs_cam, close_contact_pt_idcs) # B x N x k x 3
+ grouped_finger_diffs = utils.index_points(pos_finger_diffs, close_contact_pt_idcs) # B x N x k x 1
+ grouped_approach_dirs_cam = utils.index_points(pos_approach_dirs_cam, close_contact_pt_idcs) # B x N x k x 3
+
+ # grouped_contact_dirs_cam = pos_contact_dirs_cam[close_contact_pt_idcs, :] # B x N x k x 3
+ # grouped_finger_diffs = pos_finger_diffs[close_contact_pt_idcs] # B x N x k x 1
+ # grouped_approach_dirs_cam = pos_approach_dirs_cam[close_contact_pt_idcs, :] # B x N x k x 3
+
+ # -- Compute Labels -- #
+ # Take mean over k nearest neighbors and normalize
+ dir_label = grouped_contact_dirs_cam.mean(dim=2) # B x N x 3
+ dir_label = F.normalize(dir_label, p=2, dim=2) # B x N x 3
+
+ diff_label = grouped_finger_diffs.mean(dim=2)# B x N x 1
+
+ approach_label = grouped_approach_dirs_cam.mean(dim=2) # B x N x 3
+ approach_label = F.normalize(approach_label, p=2, dim=2) # B x N x 3
+
+ grasp_success_label = torch.mean(squared_dists_k, dim=2, keepdim=True) < radius**2 # B x N x 1
+ grasp_success_label = grasp_success_label.type(torch.float32)
+
+ # debug = dict(
+ # xyz_cam = xyz_cam,
+ # pos_contact_points_cam = pos_contact_points_cam,
+ # )
+ debug = {}
+
+
+ return dir_label, diff_label, grasp_success_label, approach_label, debug
+
+
+
+
+
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/data.py b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/data.py
new file mode 100644
index 0000000000..483f1a7ff9
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/data.py
@@ -0,0 +1,702 @@
+import os
+import sys
+
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+sys.path.append(os.path.join(BASE_DIR))
+sys.path.append(os.path.join(BASE_DIR,'contact_graspnet'))
+sys.path.append(os.path.join(BASE_DIR,'Pointnet_Pointnet2_pytorch'))
+
+from PIL import Image
+import argparse
+import numpy as np
+import copy
+import cv2
+import glob
+import trimesh.transformations as tra
+from scipy.spatial import cKDTree
+
+import provider
+from contact_graspnet_pytorch.scene_renderer import SceneRenderer
+
+def load_scene_contacts(dataset_folder, test_split_only=False, num_test=None, scene_contacts_path='scene_contacts_new'):
+ """
+ Load contact grasp annotations from acronym scenes
+
+ Arguments:
+ dataset_folder {str} -- folder with acronym data and scene contacts
+
+ Keyword Arguments:
+ test_split_only {bool} -- whether to only return test split scenes (default: {False})
+ num_test {int} -- how many test scenes to use (default: {None})
+ scene_contacts_path {str} -- name of folder with scene contact grasp annotations (default: {'scene_contacts_new'})
+
+ Returns:
+ list(dicts) -- list of scene annotations dicts with object paths and transforms and grasp contacts and transforms.
+ """
+
+ scene_contact_paths = sorted(glob.glob(os.path.join(dataset_folder, scene_contacts_path, '*')))
+ if test_split_only:
+ scene_contact_paths = scene_contact_paths[-num_test:]
+ contact_infos = []
+ for contact_path in scene_contact_paths:
+ print(contact_path)
+ try:
+ npz = np.load(contact_path, allow_pickle=False)
+ contact_info = {'scene_contact_points':npz['scene_contact_points'],
+ 'obj_paths':npz['obj_paths'],
+ 'obj_transforms':npz['obj_transforms'],
+ 'obj_scales':npz['obj_scales'],
+ 'grasp_transforms':npz['grasp_transforms']}
+ contact_infos.append(contact_info)
+ except:
+ print('corrupt, ignoring..')
+ return contact_infos
+
+def preprocess_pc_for_inference(input_pc, num_point, pc_mean=None, return_mean=False, use_farthest_point=False, convert_to_internal_coords=False):
+ """
+ Various preprocessing of the point cloud (downsampling, centering, coordinate transforms)
+
+ Arguments:
+ input_pc {np.ndarray} -- Nx3 input point cloud
+ num_point {int} -- downsample to this amount of points
+
+ Keyword Arguments:
+ pc_mean {np.ndarray} -- use 3x1 pre-computed mean of point cloud (default: {None})
+ return_mean {bool} -- whether to return the point cloud mean (default: {False})
+ use_farthest_point {bool} -- use farthest point for downsampling (slow and suspectible to outliers) (default: {False})
+ convert_to_internal_coords {bool} -- Convert from opencv to internal coordinates (x left, y up, z front) (default: {False})
+
+ Returns:
+ [np.ndarray] -- num_pointx3 preprocessed point cloud
+ """
+ normalize_pc_count = input_pc.shape[0] != num_point
+ if normalize_pc_count:
+ pc = regularize_pc_point_count(input_pc, num_point, use_farthest_point=use_farthest_point).copy()
+ else:
+ pc = input_pc.copy()
+
+ if convert_to_internal_coords:
+ pc[:,:2] *= -1
+
+ if pc_mean is None:
+ pc_mean = np.mean(pc, 0)
+
+ pc -= np.expand_dims(pc_mean, 0)
+ if return_mean:
+ return pc, pc_mean
+ else:
+ return pc
+
+
+def inverse_transform(trans):
+ """
+ Computes the inverse of 4x4 transform.
+
+ Arguments:
+ trans {np.ndarray} -- 4x4 transform.
+
+ Returns:
+ [np.ndarray] -- inverse 4x4 transform
+ """
+ rot = trans[:3, :3]
+ t = trans[:3, 3]
+ rot = np.transpose(rot)
+ t = -np.matmul(rot, t)
+ output = np.zeros((4, 4), dtype=np.float32)
+ output[3][3] = 1
+ output[:3, :3] = rot
+ output[:3, 3] = t
+
+ return output
+
+def distance_by_translation_point(p1, p2):
+ """
+ Gets two nx3 points and computes the distance between point p1 and p2.
+ """
+ return np.sqrt(np.sum(np.square(p1 - p2), axis=-1))
+
+
+def farthest_points(data, nclusters, dist_func, return_center_indexes=False, return_distances=False, verbose=False):
+ """
+ Performs farthest point sampling on data points.
+ Args:
+ data: numpy array of the data points.
+ nclusters: int, number of clusters.
+ dist_dunc: distance function that is used to compare two data points.
+ return_center_indexes: bool, If True, returns the indexes of the center of
+ clusters.
+ return_distances: bool, If True, return distances of each point from centers.
+
+ Returns clusters, [centers, distances]:
+ clusters: numpy array containing the cluster index for each element in
+ data.
+ centers: numpy array containing the integer index of each center.
+ distances: numpy array of [npoints] that contains the closest distance of
+ each point to any of the cluster centers.
+ """
+ if nclusters >= data.shape[0]:
+ if return_center_indexes:
+ return np.arange(data.shape[0], dtype=np.int32), np.arange(data.shape[0], dtype=np.int32)
+
+ return np.arange(data.shape[0], dtype=np.int32)
+
+ clusters = np.ones((data.shape[0],), dtype=np.int32) * -1
+ distances = np.ones((data.shape[0],), dtype=np.float32) * 1e7
+ centers = []
+ for iter in range(nclusters):
+ index = np.argmax(distances)
+ centers.append(index)
+ shape = list(data.shape)
+ for i in range(1, len(shape)):
+ shape[i] = 1
+
+ broadcasted_data = np.tile(np.expand_dims(data[index], 0), shape)
+ new_distances = dist_func(broadcasted_data, data)
+ distances = np.minimum(distances, new_distances)
+ clusters[distances == new_distances] = iter
+ if verbose:
+ print('farthest points max distance : {}'.format(np.max(distances)))
+
+ if return_center_indexes:
+ if return_distances:
+ return clusters, np.asarray(centers, dtype=np.int32), distances
+ return clusters, np.asarray(centers, dtype=np.int32)
+
+ return clusters
+
+def reject_median_outliers(data, m=0.4, z_only=False):
+ """
+ Reject outliers with median absolute distance m
+
+ Arguments:
+ data {[np.ndarray]} -- Numpy array such as point cloud
+
+ Keyword Arguments:
+ m {[float]} -- Maximum absolute distance from median in m (default: {0.4})
+ z_only {[bool]} -- filter only via z_component (default: {False})
+
+ Returns:
+ [np.ndarray] -- Filtered data without outliers
+ """
+ if z_only:
+ d = np.abs(data[:,2:3] - np.median(data[:,2:3]))
+ else:
+ d = np.abs(data - np.median(data, axis=0, keepdims=True))
+
+ return data[np.sum(d, axis=1) < m]
+
+def regularize_pc_point_count(pc, npoints, use_farthest_point=False):
+ """
+ If point cloud pc has less points than npoints, it oversamples.
+ Otherwise, it downsample the input pc to have npoint points.
+ use_farthest_point: indicates
+
+ :param pc: Nx3 point cloud
+ :param npoints: number of points the regularized point cloud should have
+ :param use_farthest_point: use farthest point sampling to downsample the points, runs slower.
+ :returns: npointsx3 regularized point cloud
+ """
+
+ if pc.shape[0] > npoints:
+ if use_farthest_point:
+ _, center_indexes = farthest_points(pc, npoints, distance_by_translation_point, return_center_indexes=True)
+ else:
+ center_indexes = np.random.choice(range(pc.shape[0]), size=npoints, replace=False)
+ pc = pc[center_indexes, :]
+ else:
+ required = npoints - pc.shape[0]
+ if required > 0:
+ index = np.random.choice(range(pc.shape[0]), size=required)
+ pc = np.concatenate((pc, pc[index, :]), axis=0)
+ return pc
+
+def depth2pc(depth, K, rgb=None):
+ """
+ Convert depth and intrinsics to point cloud and optionally point cloud color
+ :param depth: hxw depth map in m
+ :param K: 3x3 Camera Matrix with intrinsics
+ :returns: (Nx3 point cloud, point cloud color)
+ """
+
+ mask = np.where(depth > 0)
+ x,y = mask[1], mask[0]
+
+ normalized_x = (x.astype(np.float32) - K[0,2])
+ normalized_y = (y.astype(np.float32) - K[1,2])
+
+ world_x = normalized_x * depth[y, x] / K[0,0]
+ world_y = normalized_y * depth[y, x] / K[1,1]
+ world_z = depth[y, x]
+
+ if rgb is not None:
+ rgb = rgb[y,x,:]
+
+ pc = np.vstack((world_x, world_y, world_z)).T
+ return (pc, rgb)
+
+
+def estimate_normals_cam_from_pc(self, pc_cam, max_radius=0.05, k=12):
+ """
+ Estimates normals in camera coords from given point cloud.
+
+ Arguments:
+ pc_cam {np.ndarray} -- Nx3 point cloud in camera coordinates
+
+ Keyword Arguments:
+ max_radius {float} -- maximum radius for normal computation (default: {0.05})
+ k {int} -- Number of neighbors for normal computation (default: {12})
+
+ Returns:
+ [np.ndarray] -- Nx3 point cloud normals
+ """
+ tree = cKDTree(pc_cam, leafsize=pc_cam.shape[0]+1)
+ _, ndx = tree.query(pc_cam, k=k, distance_upper_bound=max_radius, n_jobs=-1) # num_points x k
+
+ for c,idcs in enumerate(ndx):
+ idcs[idcs==pc_cam.shape[0]] = c
+ ndx[c,:] = idcs
+ neighbors = np.array([pc_cam[ndx[:,n],:] for n in range(k)]).transpose((1,0,2))
+ pc_normals = vectorized_normal_computation(pc_cam, neighbors)
+ return pc_normals
+
+def vectorized_normal_computation(pc, neighbors):
+ """
+ Vectorized normal computation with numpy
+
+ Arguments:
+ pc {np.ndarray} -- Nx3 point cloud
+ neighbors {np.ndarray} -- Nxkx3 neigbours
+
+ Returns:
+ [np.ndarray] -- Nx3 normal directions
+ """
+ diffs = neighbors - np.expand_dims(pc, 1) # num_point x k x 3
+ covs = np.matmul(np.transpose(diffs, (0, 2, 1)), diffs) # num_point x 3 x 3
+ covs /= diffs.shape[1]**2
+ # takes most time: 6-7ms
+ eigen_values, eigen_vectors = np.linalg.eig(covs) # num_point x 3, num_point x 3 x 3
+ orders = np.argsort(-eigen_values, axis=1) # num_point x 3
+ orders_third = orders[:,2] # num_point
+ directions = eigen_vectors[np.arange(pc.shape[0]),:,orders_third] # num_point x 3
+ dots = np.sum(directions * pc, axis=1) # num_point
+ directions[dots >= 0] = -directions[dots >= 0]
+ return directions
+
+def load_available_input_data(p, K=None):
+ """
+ Load available data from input file path.
+
+ Numpy files .npz/.npy should have keys
+ 'depth' + 'K' + (optionally) 'segmap' + (optionally) 'rgb'
+ or for point clouds:
+ 'xyz' + (optionally) 'xyz_color'
+
+ png files with only depth data (in mm) can be also loaded.
+ If the image path is from the GraspNet dataset, corresponding rgb, segmap and intrinic are also loaded.
+
+ :param p: .png/.npz/.npy file path that contain depth/pointcloud and optionally intrinsics/segmentation/rgb
+ :param K: 3x3 Camera Matrix with intrinsics
+ :returns: All available data among segmap, rgb, depth, cam_K, pc_full, pc_colors
+ """
+
+ segmap, rgb, depth, pc_full, pc_colors = None, None, None, None, None
+
+ if K is not None:
+ if isinstance(K,str):
+ cam_K = eval(K)
+ cam_K = np.array(K).reshape(3,3)
+
+ if '.np' in p:
+ data = np.load(p, allow_pickle=True)
+ if '.npz' in p:
+ keys = data.files
+ else:
+ keys = []
+ if len(data.shape) == 0:
+ data = data.item()
+ keys = data.keys()
+ elif data.shape[-1] == 3:
+ pc_full = data
+ else:
+ depth = data
+
+ if 'depth' in keys:
+ depth = data['depth']
+ if K is None and 'K' in keys:
+ cam_K = data['K'].reshape(3,3)
+ if 'segmap' in keys:
+ segmap = data['segmap']
+ if 'seg' in keys:
+ segmap = data['seg']
+ if 'rgb' in keys:
+ rgb = data['rgb']
+ rgb = np.array(cv2.cvtColor(rgb, cv2.COLOR_BGR2RGB))
+ elif 'xyz' in keys:
+ pc_full = np.array(data['xyz']).reshape(-1,3)
+ if 'xyz_color' in keys:
+ pc_colors = data['xyz_color']
+ elif '.png' in p:
+ if os.path.exists(p.replace('depth', 'label')):
+ # graspnet data
+ depth, rgb, segmap, K = load_graspnet_data(p)
+ elif os.path.exists(p.replace('depths', 'images').replace('npy', 'png')):
+ rgb = np.array(Image.open(p.replace('depths', 'images').replace('npy', 'png')))
+ else:
+ depth = np.array(Image.open(p))
+ else:
+ raise ValueError('{} is neither png nor npz/npy file'.format(p))
+
+ return segmap, rgb, depth, cam_K, pc_full, pc_colors
+
+def load_graspnet_data(rgb_image_path):
+
+ """
+ Loads data from the GraspNet-1Billion dataset
+ # https://graspnet.net/
+
+ :param rgb_image_path: .png file path to depth image in graspnet dataset
+ :returns: (depth, rgb, segmap, K)
+ """
+
+ depth = np.array(Image.open(rgb_image_path))/1000. # m to mm
+ segmap = np.array(Image.open(rgb_image_path.replace('depth', 'label')))
+ rgb = np.array(Image.open(rgb_image_path.replace('depth', 'rgb')))
+
+ # graspnet images are upside down, rotate for inference
+ # careful: rotate grasp poses back for evaluation
+ depth = np.rot90(depth,2)
+ segmap = np.rot90(segmap,2)
+ rgb = np.rot90(rgb,2)
+
+ if 'kinect' in rgb_image_path:
+ # Kinect azure:
+ K=np.array([[631.54864502 , 0. , 638.43517329],
+ [ 0. , 631.20751953, 366.49904066],
+ [ 0. , 0. , 1. ]])
+ else:
+ # Realsense:
+ K=np.array([[616.36529541 , 0. , 310.25881958],
+ [ 0. , 616.20294189, 236.59980774],
+ [ 0. , 0. , 1. ]])
+
+ return depth, rgb, segmap, K
+
+def center_pc_convert_cam(cam_poses, batch_data):
+ """
+ Converts from OpenGL to OpenCV coordinates, computes inverse of camera pose and centers point cloud
+
+ :param cam_poses: (bx4x4) Camera poses in OpenGL format
+ :param batch_data: (bxNx3) point clouds
+ :returns: (cam_poses, batch_data) converted
+ """
+ # OpenCV OpenGL conversion
+ for j in range(len(cam_poses)):
+ cam_poses[j,:3,1] = -cam_poses[j,:3,1]
+ cam_poses[j,:3,2] = -cam_poses[j,:3,2]
+ cam_poses[j] = inverse_transform(cam_poses[j])
+
+ pc_mean = np.mean(batch_data, axis=1, keepdims=True)
+ batch_data[:,:,:3] -= pc_mean[:,:,:3]
+ cam_poses[:,:3,3] -= pc_mean[:,0,:3]
+
+ return cam_poses, batch_data
+
+
+class PointCloudReader:
+ """
+ Class to load scenes, render point clouds and augment them during training
+
+ Arguments:
+ root_folder {str} -- acronym root folder
+ batch_size {int} -- number of rendered point clouds per-batch
+
+ Keyword Arguments:
+ raw_num_points {int} -- Number of random/farthest point samples per scene (default: {20000})
+ estimate_normals {bool} -- compute normals from rendered point cloud (default: {False})
+ caching {bool} -- cache scenes in memory (default: {True})
+ use_uniform_quaternions {bool} -- use uniform quaternions for camera sampling (default: {False})
+ scene_obj_scales {list} -- object scales in scene (default: {None})
+ scene_obj_paths {list} -- object paths in scene (default: {None})
+ scene_obj_transforms {np.ndarray} -- object transforms in scene (default: {None})
+ num_train_samples {int} -- training scenes (default: {None})
+ num_test_samples {int} -- test scenes (default: {None})
+ use_farthest_point {bool} -- use farthest point sampling to reduce point cloud dimension (default: {False})
+ intrinsics {str} -- intrinsics to for rendering depth maps (default: {None})
+ distance_range {tuple} -- distance range from camera to center of table (default: {(0.9,1.3)})
+ elevation {tuple} -- elevation range (90 deg is top-down) (default: {(30,150)})
+ pc_augm_config {dict} -- point cloud augmentation config (default: {None})
+ depth_augm_config {dict} -- depth map augmentation config (default: {None})
+ """
+ def __init__(
+ self,
+ root_folder,
+ batch_size=1,
+ raw_num_points = 20000,
+ estimate_normals = False,
+ caching=True,
+ use_uniform_quaternions=False,
+ scene_obj_scales=None,
+ scene_obj_paths=None,
+ scene_obj_transforms=None,
+ num_train_samples=None,
+ num_test_samples=None,
+ use_farthest_point = False,
+ intrinsics = None,
+ distance_range = (0.9,1.3),
+ elevation = (30,150),
+ pc_augm_config = None,
+ depth_augm_config = None
+ ):
+ self._root_folder = root_folder
+ self._batch_size = batch_size
+ self._raw_num_points = raw_num_points
+ self._caching = caching
+ self._num_train_samples = num_train_samples
+ self._num_test_samples = num_test_samples
+ self._estimate_normals = estimate_normals
+ self._use_farthest_point = use_farthest_point
+ self._scene_obj_scales = scene_obj_scales
+ self._scene_obj_paths = scene_obj_paths
+ self._scene_obj_transforms = scene_obj_transforms
+ self._distance_range = distance_range
+ self._pc_augm_config = pc_augm_config
+ self._depth_augm_config = depth_augm_config
+
+ self._current_pc = None
+ self._cache = {}
+
+ self._renderer = SceneRenderer(caching=True, intrinsics=intrinsics)
+
+ if use_uniform_quaternions:
+ quat_path = os.path.join(self._root_folder, 'uniform_quaternions/data2_4608.qua')
+ quaternions = [l[:-1].split('\t') for l in open(quat_path, 'r').readlines()]
+
+ quaternions = [[float(t[0]),
+ float(t[1]),
+ float(t[2]),
+ float(t[3])] for t in quaternions]
+ quaternions = np.asarray(quaternions)
+ quaternions = np.roll(quaternions, 1, axis=1)
+ self._all_poses = [tra.quaternion_matrix(q) for q in quaternions]
+ else:
+ self._cam_orientations = []
+ self._elevation = np.array(elevation)/180.
+ for az in np.linspace(0, np.pi * 2, 30):
+ for el in np.linspace(self._elevation[0], self._elevation[1], 30):
+ self._cam_orientations.append(tra.euler_matrix(0, -el, az))
+ self._coordinate_transform = tra.euler_matrix(np.pi/2, 0, 0).dot(tra.euler_matrix(0, np.pi/2, 0))
+
+ def get_cam_pose(self, cam_orientation):
+ """
+ Samples camera pose on shell around table center
+
+ Arguments:
+ cam_orientation {np.ndarray} -- 3x3 camera orientation matrix
+
+ Returns:
+ [np.ndarray] -- 4x4 homogeneous camera pose
+ """
+
+ distance = self._distance_range[0] + np.random.rand()*(self._distance_range[1]-self._distance_range[0])
+
+ extrinsics = np.eye(4)
+ extrinsics[0, 3] += distance
+ extrinsics = cam_orientation.dot(extrinsics)
+
+ cam_pose = extrinsics.dot(self._coordinate_transform)
+ # table height
+ cam_pose[2,3] += self._renderer._table_dims[2]
+ cam_pose[:3,:2]= -cam_pose[:3,:2]
+ return cam_pose
+
+ def _augment_pc(self, pc):
+ """
+ Augments point cloud with jitter and dropout according to config
+
+ Arguments:
+ pc {np.ndarray} -- Nx3 point cloud
+
+ Returns:
+ np.ndarray -- augmented point cloud
+ """
+
+ # not used because no artificial occlusion
+ if 'occlusion_nclusters' in self._pc_augm_config and self._pc_augm_config['occlusion_nclusters'] > 0:
+ pc = self.apply_dropout(pc,
+ self._pc_augm_config['occlusion_nclusters'],
+ self._pc_augm_config['occlusion_dropout_rate'])
+
+ if 'sigma' in self._pc_augm_config and self._pc_augm_config['sigma'] > 0:
+ pc = provider.jitter_point_cloud(pc[np.newaxis, :, :],
+ sigma=self._pc_augm_config['sigma'],
+ clip=self._pc_augm_config['clip'])[0]
+
+
+ return pc[:,:3]
+
+ def _augment_depth(self, depth):
+ """
+ Augments depth map with z-noise and smoothing according to config
+
+ Arguments:
+ depth {np.ndarray} -- depth map
+
+ Returns:
+ np.ndarray -- augmented depth map
+ """
+
+ if 'sigma' in self._depth_augm_config and self._depth_augm_config['sigma'] > 0:
+ clip = self._depth_augm_config['clip']
+ sigma = self._depth_augm_config['sigma']
+ noise = np.clip(sigma*np.random.randn(*depth.shape), -clip, clip)
+ depth += noise
+ if 'gaussian_kernel' in self._depth_augm_config and self._depth_augm_config['gaussian_kernel'] > 0:
+ kernel = self._depth_augm_config['gaussian_kernel']
+ depth_copy = depth.copy()
+ depth = cv2.GaussianBlur(depth,(kernel,kernel),0)
+ depth[depth_copy==0] = depth_copy[depth_copy==0]
+
+ return depth
+
+ def apply_dropout(self, pc, occlusion_nclusters, occlusion_dropout_rate):
+ """
+ Remove occlusion_nclusters farthest points from point cloud with occlusion_dropout_rate probability
+
+ Arguments:
+ pc {np.ndarray} -- Nx3 point cloud
+ occlusion_nclusters {int} -- noof cluster to remove
+ occlusion_dropout_rate {float} -- prob of removal
+
+ Returns:
+ [np.ndarray] -- N > Mx3 point cloud
+ """
+ if occlusion_nclusters == 0 or occlusion_dropout_rate == 0.:
+ return pc
+
+ labels = farthest_points(pc, occlusion_nclusters, distance_by_translation_point)
+
+ removed_labels = np.unique(labels)
+ removed_labels = removed_labels[np.random.rand(removed_labels.shape[0]) < occlusion_dropout_rate]
+ if removed_labels.shape[0] == 0:
+ return pc
+ mask = np.ones(labels.shape, labels.dtype)
+ for l in removed_labels:
+ mask = np.logical_and(mask, labels != l)
+ return pc[mask]
+
+ def get_scene_batch(self, scene_idx=None, return_segmap=False, save=False):
+ """
+ Render a batch of scene point clouds
+
+ Keyword Arguments:
+ scene_idx {int} -- index of the scene (default: {None})
+ return_segmap {bool} -- whether to render a segmap of objects (default: {False})
+ save {bool} -- Save training/validation data to npz file for later inference (default: {False})
+
+ Returns:
+ [batch_data, cam_poses, scene_idx] -- batch of rendered point clouds, camera poses and the scene_idx
+ """
+ dims = 6 if self._estimate_normals else 3
+ batch_data = np.empty((self._batch_size, self._raw_num_points, dims), dtype=np.float32)
+ cam_poses = np.empty((self._batch_size, 4, 4), dtype=np.float32)
+
+ if scene_idx is None:
+ scene_idx = np.random.randint(0,self._num_train_samples)
+
+ obj_paths = [os.path.join(self._root_folder, p) for p in self._scene_obj_paths[scene_idx]]
+ mesh_scales = self._scene_obj_scales[scene_idx]
+ obj_trafos = self._scene_obj_transforms[scene_idx]
+
+ self.change_scene(obj_paths, mesh_scales, obj_trafos, visualize=False)
+
+ batch_segmap, batch_obj_pcs = [], []
+ for i in range(self._batch_size):
+ # 0.005s
+ pc_cam, pc_normals, camera_pose, depth = self.render_random_scene(estimate_normals = self._estimate_normals)
+
+ if return_segmap:
+ segmap, _, obj_pcs = self._renderer.render_labels(depth, obj_paths, mesh_scales, render_pc=True)
+ batch_obj_pcs.append(obj_pcs)
+ batch_segmap.append(segmap)
+
+ batch_data[i,:,0:3] = pc_cam[:,:3]
+ if self._estimate_normals:
+ batch_data[i,:,3:6] = pc_normals[:,:3]
+ cam_poses[i,:,:] = camera_pose
+
+ if save:
+ K = np.array([[616.36529541,0,310.25881958 ],[0,616.20294189,236.59980774],[0,0,1]])
+ data = {'depth':depth, 'K':K, 'camera_pose':camera_pose, 'scene_idx':scene_idx}
+ if return_segmap:
+ data.update(segmap=segmap)
+ np.savez('results/{}_acronym.npz'.format(scene_idx), data)
+
+ if return_segmap:
+ return batch_data, cam_poses, scene_idx, batch_segmap, batch_obj_pcs
+ else:
+ return batch_data, cam_poses, scene_idx
+
+ def render_random_scene(self, estimate_normals=False, camera_pose=None):
+ """
+ Renders scene depth map, transforms to regularized pointcloud and applies augmentations
+
+ Keyword Arguments:
+ estimate_normals {bool} -- calculate and return normals (default: {False})
+ camera_pose {[type]} -- camera pose to render the scene from. (default: {None})
+
+ Returns:
+ [pc, pc_normals, camera_pose, depth] -- [point cloud, point cloud normals, camera pose, depth]
+ """
+ if camera_pose is None:
+ viewing_index = np.random.randint(0, high=len(self._cam_orientations))
+ camera_orientation = self._cam_orientations[viewing_index]
+ camera_pose = self.get_cam_pose(camera_orientation)
+
+ in_camera_pose = copy.deepcopy(camera_pose)
+
+ # 0.005 s
+ _, depth, _, camera_pose = self._renderer.render(in_camera_pose, render_pc=False)
+ depth = self._augment_depth(depth)
+
+ pc = self._renderer._to_pointcloud(depth)
+ pc = regularize_pc_point_count(pc, self._raw_num_points, use_farthest_point=self._use_farthest_point)
+ pc = self._augment_pc(pc)
+
+ pc_normals = estimate_normals_cam_from_pc(pc[:,:3], raw_num_points=self._raw_num_points) if estimate_normals else []
+
+ return pc, pc_normals, camera_pose, depth
+
+ def change_object(self, cad_path, cad_scale):
+ """
+ Change object in pyrender scene
+
+ Arguments:
+ cad_path {str} -- path to CAD model
+ cad_scale {float} -- scale of CAD model
+ """
+ raise NotImplementedError
+
+ self._renderer.change_object(cad_path, cad_scale)
+
+ def change_scene(self, obj_paths, obj_scales, obj_transforms, visualize=False):
+ """
+ Change pyrender scene
+
+ Arguments:
+ obj_paths {list[str]} -- path to CAD models in scene
+ obj_scales {list[float]} -- scales of CAD models
+ obj_transforms {list[np.ndarray]} -- poses of CAD models
+
+ Keyword Arguments:
+ visualize {bool} -- whether to update the visualizer as well (default: {False})
+ """
+ self._renderer.change_scene(obj_paths, obj_scales, obj_transforms)
+ if visualize:
+ self._visualizer.change_scene(obj_paths, obj_scales, obj_transforms)
+
+
+
+ def __del__(self):
+ print('********** terminating renderer **************')
+
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/datagen_scene_renderer.py b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/datagen_scene_renderer.py
new file mode 100644
index 0000000000..0c36a07aeb
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/datagen_scene_renderer.py
@@ -0,0 +1,294 @@
+from __future__ import print_function
+from __future__ import absolute_import
+
+import numpy as np
+import copy
+import cv2
+import math
+import os
+from copy import deepcopy
+import multiprocessing as mp
+# import pyrender
+import trimesh
+import trimesh.transformations as tra
+
+from mesh_utils import Object
+
+class SceneRenderer:
+ import pyrender
+ def __init__(self, intrinsics=None, fov=np.pi / 6, caching=True, viewing_mode=False):
+ """Renders depth with given intrinsics during training.
+
+ Keyword Arguments:
+ intrinsics {str} -- camera name from 'kinect_azure', 'realsense' (default: {None})
+ fov {float} -- field of view, ignored if inrinsics is not None (default: {np.pi/6})
+ caching {bool} -- whether to cache object meshes (default: {True})
+ viewing_mode {bool} -- visualize scene (default: {False})
+ """
+
+ self._fov = fov
+
+ self._scene = self.pyrender.Scene()
+ self._table_dims = [1.0, 1.2, 0.6]
+ self._table_pose = np.eye(4)
+ self._viewer = viewing_mode
+ if viewing_mode:
+ self._viewer = self.pyrender.Viewer(
+ self._scene,
+ use_raymond_lighting=True,
+ run_in_thread=True)
+
+ self._intrinsics = intrinsics
+ if self._intrinsics == 'realsense':
+ self._fx=616.36529541
+ self._fy=616.20294189
+ self._cx=310.25881958
+ self._cy=236.59980774
+ self._znear=0.04
+ self._zfar=20
+ self._height=480
+ self._width=640
+ elif self._intrinsics == 'kinect_azure':
+ self._fx=631.54864502
+ self._fy=631.20751953
+ self._cx=638.43517329
+ self._cy=366.49904066
+ self._znear=0.04
+ self._zfar=20
+ self._height=720
+ self._width=1280
+
+ self._add_table_node()
+ self._init_camera_renderer()
+
+ self._current_context = None
+ self._cache = {} if caching else None
+ self._caching = caching
+
+ def _init_camera_renderer(self):
+ """
+ If not in visualizing mode, initialize camera with given intrinsics
+ """
+
+ if self._viewer:
+ return
+
+ if self._intrinsics in ['kinect_azure', 'realsense']:
+ camera = self.pyrender.IntrinsicsCamera(self._fx, self._fy, self._cx, self._cy, self._znear, self._zfar)
+ self._camera_node = self._scene.add(camera, pose=np.eye(4), name='camera')
+ self.renderer = self.pyrender.OffscreenRenderer(viewport_width=self._width,
+ viewport_height=self._height,
+ point_size=1.0)
+ else:
+ camera = self.pyrender.PerspectiveCamera(yfov=self._fov, aspectRatio=1.0, znear=0.001) # do not change aspect ratio
+ self._camera_node = self._scene.add(camera, pose=tra.euler_matrix(np.pi, 0, 0), name='camera')
+ self.renderer = self.pyrender.OffscreenRenderer(400, 400)
+
+
+ def _add_table_node(self):
+ """
+ Adds table mesh and sets pose
+ """
+ if self._viewer:
+ return
+ table_mesh = trimesh.creation.box(self._table_dims)
+ mesh = self.pyrender.Mesh.from_trimesh(table_mesh)
+
+ table_node = self.pyrender.Node(mesh=mesh, name='table')
+ self._scene.add_node(table_node)
+ self._scene.set_pose(table_node, self._table_pose)
+
+
+ def _load_object(self, path, scale):
+ """
+ Load a mesh, scale and center it
+
+ Arguments:
+ path {str} -- path to mesh
+ scale {float} -- scale of the mesh
+
+ Returns:
+ dict -- contex with loaded mesh info
+ """
+ if (path, scale) in self._cache:
+ return self._cache[(path, scale)]
+ obj = Object(path)
+ obj.rescale(scale)
+
+ tmesh = obj.mesh
+ tmesh_mean = np.mean(tmesh.vertices, 0)
+ tmesh.vertices -= np.expand_dims(tmesh_mean, 0)
+
+ lbs = np.min(tmesh.vertices, 0)
+ ubs = np.max(tmesh.vertices, 0)
+ object_distance = np.max(ubs - lbs) * 5
+
+ mesh = self.pyrender.Mesh.from_trimesh(tmesh)
+
+ context = {
+ 'name': path + '_' + str(scale),
+ 'tmesh': copy.deepcopy(tmesh),
+ 'distance': object_distance,
+ 'node': self.pyrender.Node(mesh=mesh, name=path + '_' + str(scale)),
+ 'mesh_mean': np.expand_dims(tmesh_mean, 0),
+ }
+
+ self._cache[(path, scale)] = context
+
+ return self._cache[(path, scale)]
+
+ def change_scene(self, obj_paths, obj_scales, obj_transforms):
+ """Remove current objects and add new ones to the scene
+
+ Arguments:
+ obj_paths {list} -- list of object mesh paths
+ obj_scales {list} -- list of object scales
+ obj_transforms {list} -- list of object transforms
+ """
+ if self._viewer:
+ self._viewer.render_lock.acquire()
+ for n in self._scene.get_nodes():
+ if n.name not in ['table', 'camera', 'parent']:
+ self._scene.remove_node(n)
+
+ if not self._caching:
+ self._cache = {}
+
+ for p,t,s in zip(obj_paths, obj_transforms, obj_scales):
+
+ object_context = self._load_object(p, s)
+ object_context = deepcopy(object_context)
+
+ self._scene.add_node(object_context['node'])
+ self._scene.set_pose(object_context['node'], t)
+
+ if self._viewer:
+ self._viewer.render_lock.release()
+
+ def _to_pointcloud(self, depth):
+ """Convert depth map to point cloud
+
+ Arguments:
+ depth {np.ndarray} -- HxW depth map
+
+ Returns:
+ np.ndarray -- Nx4 homog. point cloud
+ """
+ if self._intrinsics in ['kinect_azure', 'realsense']:
+ fy = self._fy
+ fx = self._fx
+ height = self._height
+ width = self._width
+ cx = self._cx
+ cy = self._cy
+
+ mask = np.where(depth > 0)
+
+ x = mask[1]
+ y = mask[0]
+
+ normalized_x = (x.astype(np.float32) - cx)
+ normalized_y = (y.astype(np.float32) - cy)
+ else:
+ fy = fx = 0.5 / np.tan(self._fov * 0.5) # aspectRatio is one.
+ height = depth.shape[0]
+ width = depth.shape[1]
+
+ mask = np.where(depth > 0)
+
+ x = mask[1]
+ y = mask[0]
+
+ normalized_x = (x.astype(np.float32) - width * 0.5) / width
+ normalized_y = (y.astype(np.float32) - height * 0.5) / height
+
+ world_x = normalized_x * depth[y, x] / fx
+ world_y = normalized_y * depth[y, x] / fy
+ world_z = depth[y, x]
+ ones = np.ones(world_z.shape[0], dtype=np.float32)
+
+ return np.vstack((world_x, world_y, world_z, ones)).T
+
+
+ def render(self, pose, render_pc=True):
+ """Render object or scene in camera pose
+
+ Arguments:
+ pose {np.ndarray} -- 4x4 camera pose
+
+ Keyword Arguments:
+ render_pc {bool} -- whether to convert depth map to point cloud (default: {True})
+
+ Returns:
+ [np.ndarray, np.ndarray, np.ndarray, np.ndarray] -- HxWx3 color, HxW depth, Nx4 point cloud, 4x4 camera pose
+ """
+
+ transferred_pose = pose.copy()
+ self._scene.set_pose(self._camera_node, transferred_pose)
+
+ color, depth = self.renderer.render(self._scene)
+
+ if render_pc:
+ pc = self._to_pointcloud(depth)
+ else:
+ pc = None
+
+ return color, depth, pc, transferred_pose
+
+ def render_labels(self, full_depth, obj_paths, obj_scales, render_pc=False):
+ """Render instance segmentation map
+
+ Arguments:
+ full_depth {np.ndarray} -- HxW depth map
+ obj_paths {list} -- list of object paths in scene
+ obj_scales {list} -- list of object scales in scene
+
+ Keyword Arguments:
+ render_pc {bool} -- whether to return object-wise point clouds (default: {False})
+
+ Returns:
+ [np.ndarray, list, dict] -- integer segmap with 0=background, list of
+ corresponding object names, dict of corresponding point clouds
+ """
+
+ scene_object_nodes = []
+ for n in self._scene.get_nodes():
+ if n.name not in ['camera', 'parent']:
+ n.mesh.is_visible = False
+ if n.name != 'table':
+ scene_object_nodes.append(n)
+
+ obj_names = [path + '_' + str(scale) for path, scale in zip(obj_paths,obj_scales)]
+
+
+ pcs = {}
+ output = np.zeros(full_depth.shape, np.uint8)
+ for n in scene_object_nodes:
+ n.mesh.is_visible = True
+
+ depth = self.renderer.render(self._scene)[1]
+ mask = np.logical_and(
+ (np.abs(depth - full_depth) < 1e-6), np.abs(full_depth) > 0
+ )
+ if not np.any(mask):
+ continue
+ if np.any(output[mask] != 0):
+ raise ValueError('wrong label')
+
+ indices = [i+1 for i, x in enumerate(obj_names) if x == n.name]
+ for i in indices:
+ if not np.any(output==i):
+ print('')
+ output[mask] = i
+ break
+
+ n.mesh.is_visible = False
+
+ if render_pc:
+ pcs[i] = self._to_pointcloud(depth*mask)
+
+ for n in self._scene.get_nodes():
+ if n.name not in ['camera', 'parent']:
+ n.mesh.is_visible = True
+
+ return output, ['BACKGROUND'] + obj_names, pcs
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/inference.py b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/inference.py
new file mode 100644
index 0000000000..29223ccd56
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/inference.py
@@ -0,0 +1,128 @@
+import glob
+import os
+import argparse
+import sys
+
+# Simple fix: add parent directory to path to make imports work when running directly
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
+
+import torch
+import numpy as np
+
+# Simple conditional import
+try:
+ from contact_graspnet_pytorch.contact_grasp_estimator import GraspEstimator
+ from contact_graspnet_pytorch import config_utils
+ from contact_graspnet_pytorch.visualization_utils_o3d import visualize_grasps, show_image
+ from contact_graspnet_pytorch.checkpoints import CheckpointIO
+ from data import load_available_input_data
+except ImportError:
+ # When running as script directly
+ from contact_grasp_estimator import GraspEstimator
+ import config_utils
+ from visualization_utils_o3d import visualize_grasps, show_image
+ from checkpoints import CheckpointIO
+ from data import load_available_input_data
+
+def inference(global_config,
+ ckpt_dir,
+ input_paths,
+ local_regions=True,
+ filter_grasps=True,
+ skip_border_objects=False,
+ z_range = [0.2,1.8],
+ forward_passes=1,
+ K=None,):
+ """
+ Predict 6-DoF grasp distribution for given model and input data
+
+ :param global_config: config.yaml from checkpoint directory
+ :param checkpoint_dir: checkpoint directory
+ :param input_paths: .png/.npz/.npy file paths that contain depth/pointcloud and optionally intrinsics/segmentation/rgb
+ :param K: Camera Matrix with intrinsics to convert depth to point cloud
+ :param local_regions: Crop 3D local regions around given segments.
+ :param skip_border_objects: When extracting local_regions, ignore segments at depth map boundary.
+ :param filter_grasps: Filter and assign grasp contacts according to segmap.
+ :param segmap_id: only return grasps from specified segmap_id.
+ :param z_range: crop point cloud at a minimum/maximum z distance from camera to filter out outlier points. Default: [0.2, 1.8] m
+ :param forward_passes: Number of forward passes to run on each point cloud. Default: 1
+ """
+ # Build the model
+ grasp_estimator = GraspEstimator(global_config)
+
+ # Load the weights
+ model_checkpoint_dir = os.path.join(ckpt_dir, 'checkpoints')
+ checkpoint_io = CheckpointIO(checkpoint_dir=model_checkpoint_dir, model=grasp_estimator.model)
+ try:
+ load_dict = checkpoint_io.load('model.pt')
+ except FileExistsError:
+ print('No model checkpoint found')
+ load_dict = {}
+
+
+ os.makedirs('results', exist_ok=True)
+
+ # Process example test scenes
+ for p in glob.glob(input_paths):
+ print('Loading ', p)
+
+ pc_segments = {}
+ segmap, rgb, depth, cam_K, pc_full, pc_colors = load_available_input_data(p, K=K)
+
+ if segmap is None and (local_regions or filter_grasps):
+ raise ValueError('Need segmentation map to extract local regions or filter grasps')
+
+ if pc_full is None:
+ print('Converting depth to point cloud(s)...')
+ pc_full, pc_segments, pc_colors = grasp_estimator.extract_point_clouds(depth, cam_K, segmap=segmap, rgb=rgb,
+ skip_border_objects=skip_border_objects,
+ z_range=z_range)
+
+ print(pc_full.shape)
+
+ print('Generating Grasps...')
+ pred_grasps_cam, scores, contact_pts, _ = grasp_estimator.predict_scene_grasps(pc_full,
+ pc_segments=pc_segments,
+ local_regions=local_regions,
+ filter_grasps=filter_grasps,
+ forward_passes=forward_passes)
+
+ # Save results
+ np.savez('results/predictions_{}'.format(os.path.basename(p.replace('png','npz').replace('npy','npz'))),
+ pc_full=pc_full, pred_grasps_cam=pred_grasps_cam, scores=scores, contact_pts=contact_pts, pc_colors=pc_colors)
+
+ # Visualize results
+ show_image(rgb, segmap)
+ visualize_grasps(pc_full, pred_grasps_cam, scores, plot_opencv_cam=True, pc_colors=pc_colors)
+
+ if not glob.glob(input_paths):
+ print('No files found: ', input_paths)
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--ckpt_dir', default='checkpoints/contact_graspnet', help='Log dir')
+ parser.add_argument('--np_path', default='test_data/7.npy', help='Input data: npz/npy file with keys either "depth" & camera matrix "K" or just point cloud "pc" in meters. Optionally, a 2D "segmap"')
+ parser.add_argument('--K', default=None, help='Flat Camera Matrix, pass as "[fx, 0, cx, 0, fy, cy, 0, 0 ,1]"')
+ parser.add_argument('--z_range', default=[0.2,1.8], help='Z value threshold to crop the input point cloud')
+ parser.add_argument('--local_regions', action='store_true', default=True, help='Crop 3D local regions around given segments.')
+ parser.add_argument('--filter_grasps', action='store_true', default=True, help='Filter grasp contacts according to segmap.')
+ parser.add_argument('--skip_border_objects', action='store_true', default=False, help='When extracting local_regions, ignore segments at depth map boundary.')
+ parser.add_argument('--forward_passes', type=int, default=1, help='Run multiple parallel forward passes to mesh_utils more potential contact points.')
+ parser.add_argument('--arg_configs', nargs="*", type=str, default=[], help='overwrite config parameters')
+ FLAGS = parser.parse_args()
+
+ global_config = config_utils.load_config(FLAGS.ckpt_dir, batch_size=FLAGS.forward_passes, arg_configs=FLAGS.arg_configs)
+
+ print(str(global_config))
+ print('pid: %s'%(str(os.getpid())))
+
+ inference(global_config,
+ FLAGS.ckpt_dir,
+ FLAGS.np_path,
+ local_regions=FLAGS.local_regions,
+ filter_grasps=FLAGS.filter_grasps,
+ skip_border_objects=FLAGS.skip_border_objects,
+ z_range=eval(str(FLAGS.z_range)),
+ forward_passes=FLAGS.forward_passes,
+ K=eval(str(FLAGS.K)))
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/mesh_utils.py b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/mesh_utils.py
new file mode 100644
index 0000000000..c1d5658d7f
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/mesh_utils.py
@@ -0,0 +1,303 @@
+# -*- coding: utf-8 -*-
+"""Helper classes and functions to sample grasps for a given object mesh."""
+
+from __future__ import print_function
+
+import argparse
+from collections import OrderedDict
+import errno
+import json
+import os
+import numpy as np
+import pickle
+from tqdm import tqdm
+import trimesh
+import trimesh.transformations as tra
+
+import torch
+
+# import tensorflow.compat.v1 as tf
+
+class Object(object):
+ """Represents a graspable object."""
+
+ def __init__(self, filename):
+ """Constructor.
+
+ :param filename: Mesh to load
+ :param scale: Scaling factor
+ """
+ try:
+ self.mesh = trimesh.load(filename)
+ except:
+ print('Could not load mesh from filename: {}'.format(filename))
+ raise ValueError('Could not load mesh from filename: {}'.format(filename))
+
+ self.scale = 1.0
+
+ # print(filename)
+ self.filename = filename
+ if isinstance(self.mesh, list):
+ # this is fixed in a newer trimesh version:
+ # https://github.com/mikedh/trimesh/issues/69
+ print("Warning: Will do a concatenation")
+ self.mesh = trimesh.util.concatenate(self.mesh)
+
+ self.collision_manager = trimesh.collision.CollisionManager()
+ self.collision_manager.add_object('object', self.mesh)
+
+ def rescale(self, scale=1.0):
+ """Set scale of object mesh.
+
+ :param scale
+ """
+ self.scale = scale
+ self.mesh.apply_scale(self.scale)
+
+ def resize(self, size=1.0):
+ """Set longest of all three lengths in Cartesian space.
+
+ :param size
+ """
+ self.scale = size / np.max(self.mesh.extents)
+ self.mesh.apply_scale(self.scale)
+
+ def in_collision_with(self, mesh, transform):
+ """Check whether the object is in collision with the provided mesh.
+
+ :param mesh:
+ :param transform:
+ :return: boolean value
+ """
+ return self.collision_manager.in_collision_single(mesh, transform=transform)
+
+
+class PandaGripper(object):
+ """An object representing a Franka Panda gripper."""
+
+ def __init__(self, q=None, num_contact_points_per_finger=10, root_folder=os.path.dirname(os.path.dirname(os.path.abspath(__file__)))):
+ """Create a Franka Panda parallel-yaw gripper object.
+
+ Keyword Arguments:
+ q {list of int} -- opening configuration (default: {None})
+ num_contact_points_per_finger {int} -- contact points per finger (default: {10})
+ root_folder {str} -- base folder for model files (default: {''})
+ """
+ self.joint_limits = [0.0, 0.04]
+ self.root_folder = root_folder
+
+ self.default_pregrasp_configuration = 0.04
+ if q is None:
+ q = self.default_pregrasp_configuration
+
+ self.q = q
+ fn_base = os.path.join(root_folder, 'gripper_models/panda_gripper/hand.stl')
+ fn_finger = os.path.join(root_folder, 'gripper_models/panda_gripper/finger.stl')
+
+ self.base = trimesh.load(fn_base)
+ self.finger_l = trimesh.load(fn_finger)
+ self.finger_r = self.finger_l.copy()
+
+ # transform fingers relative to the base
+ self.finger_l.apply_transform(tra.euler_matrix(0, 0, np.pi))
+ self.finger_l.apply_translation([+q, 0, 0.0584])
+ self.finger_r.apply_translation([-q, 0, 0.0584])
+
+ self.fingers = trimesh.util.concatenate([self.finger_l, self.finger_r])
+ self.hand = trimesh.util.concatenate([self.fingers, self.base])
+
+
+ self.contact_ray_origins = []
+ self.contact_ray_directions = []
+
+ # coords_path = os.path.join(root_folder, 'gripper_control_points/panda_gripper_coords.npy')
+ pickle_path = os.path.join(root_folder,'gripper_control_points/panda_gripper_coords.pickle')
+ try:
+ with open(pickle_path, 'rb') as f:
+ # Try different pickle loading strategies
+ try:
+ self.finger_coords = pickle.load(f, encoding='latin1')
+ except:
+ # Reset file pointer
+ f.seek(0)
+ try:
+ self.finger_coords = pickle.load(f, encoding='bytes')
+ except:
+ # Reset file pointer
+ f.seek(0)
+ self.finger_coords = pickle.load(f)
+ except Exception as e:
+ print(f"Error loading gripper model from {pickle_path}: {e}")
+ # Fallback to hardcoded control points if pickle fails
+ self.finger_coords = {
+ 'gripper_left_center_flat': np.array([0.0, 0.0, 0.0]),
+ 'gripper_right_center_flat': np.array([0.08, 0.0, 0.0])
+ }
+ finger_direction = self.finger_coords['gripper_right_center_flat'] - self.finger_coords['gripper_left_center_flat']
+ self.contact_ray_origins.append(np.r_[self.finger_coords['gripper_left_center_flat'], 1])
+ self.contact_ray_origins.append(np.r_[self.finger_coords['gripper_right_center_flat'], 1])
+ self.contact_ray_directions.append(finger_direction / np.linalg.norm(finger_direction))
+ self.contact_ray_directions.append(-finger_direction / np.linalg.norm(finger_direction))
+
+ self.contact_ray_origins = np.array(self.contact_ray_origins)
+ self.contact_ray_directions = np.array(self.contact_ray_directions)
+
+ def get_meshes(self):
+ """Get list of meshes that this gripper consists of.
+
+ Returns:
+ list of trimesh -- visual meshes
+ """
+ return [self.finger_l, self.finger_r, self.base]
+
+ def get_closing_rays_contact(self, transform):
+ """Get an array of rays defining the contact locations and directions on the hand.
+
+ Arguments:
+ transform {[nump.array]} -- a 4x4 homogeneous matrix
+ contact_ray_origin {[nump.array]} -- a 4x1 homogeneous vector
+ contact_ray_direction {[nump.array]} -- a 4x1 homogeneous vector
+
+ Returns:
+ numpy.array -- transformed rays (origin and direction)
+ """
+ return transform[:3, :].dot(
+ self.contact_ray_origins.T).T, transform[:3, :3].dot(self.contact_ray_directions.T).T
+
+ def get_control_point_tensor(self, batch_size, use_torch=True, symmetric = False, convex_hull=True):
+ """
+ Outputs a 5 point gripper representation of shape (batch_size x 5 x 3).
+
+ Arguments:
+ batch_size {int} -- batch size
+
+ Keyword Arguments:
+ use_tf {bool} -- outputing a tf tensor instead of a numpy array (default: {True})
+ symmetric {bool} -- Output the symmetric control point configuration of the gripper (default: {False})
+ convex_hull {bool} -- Return control points according to the convex hull panda gripper model (default: {True})
+
+ Returns:
+ np.ndarray -- control points of the panda gripper
+ """
+
+ control_points = np.load(os.path.join(self.root_folder, 'gripper_control_points/panda.npy'))[:, :3]
+ if symmetric:
+ control_points = [[0, 0, 0], control_points[1, :],control_points[0, :], control_points[-1, :], control_points[-2, :]]
+ else:
+ control_points = [[0, 0, 0], control_points[0, :], control_points[1, :], control_points[-2, :], control_points[-1, :]]
+
+ control_points = np.asarray(control_points, dtype=np.float32)
+ if not convex_hull:
+ # actual depth of the gripper different from convex collision model
+ control_points[1:3, 2] = 0.0584
+ control_points = np.tile(np.expand_dims(control_points, 0), [batch_size, 1, 1])
+
+ if use_torch:
+ return torch.from_numpy(control_points)
+ return tf.convert_to_tensor(control_points)
+
+ return control_points
+
+
+def create_gripper(name, configuration=None, root_folder=os.path.dirname(os.path.dirname(os.path.abspath(__file__)))):
+ """Create a gripper object.
+
+ Arguments:
+ name {str} -- name of the gripper
+
+ Keyword Arguments:
+ configuration {list of float} -- configuration (default: {None})
+ root_folder {str} -- base folder for model files (default: {''})
+
+ Raises:
+ Exception: If the gripper name is unknown.
+
+ Returns:
+ [type] -- gripper object
+ """
+ if name.lower() == 'panda':
+ return PandaGripper(q=configuration, root_folder=root_folder)
+ else:
+ raise Exception("Unknown gripper: {}".format(name))
+
+
+def in_collision_with_gripper(object_mesh, gripper_transforms, gripper_name, silent=False):
+ """Check collision of object with gripper.
+
+ Arguments:
+ object_mesh {trimesh} -- mesh of object
+ gripper_transforms {list of numpy.array} -- homogeneous matrices of gripper
+ gripper_name {str} -- name of gripper
+
+ Keyword Arguments:
+ silent {bool} -- verbosity (default: {False})
+
+ Returns:
+ [list of bool] -- Which gripper poses are in collision with object mesh
+ """
+ manager = trimesh.collision.CollisionManager()
+ manager.add_object('object', object_mesh)
+ gripper_meshes = [create_gripper(gripper_name).hand]
+ min_distance = []
+ for tf in tqdm(gripper_transforms, disable=silent):
+ min_distance.append(np.min([manager.min_distance_single(
+ gripper_mesh, transform=tf) for gripper_mesh in gripper_meshes]))
+
+ return [d == 0 for d in min_distance], min_distance
+
+def grasp_contact_location(transforms, successfuls, collisions, object_mesh, gripper_name='panda', silent=False):
+ """Computes grasp contacts on objects and normals, offsets, directions
+
+ Arguments:
+ transforms {[type]} -- grasp poses
+ collisions {[type]} -- collision information
+ object_mesh {trimesh} -- object mesh
+
+ Keyword Arguments:
+ gripper_name {str} -- name of gripper (default: {'panda'})
+ silent {bool} -- verbosity (default: {False})
+
+ Returns:
+ list of dicts of contact information per grasp ray
+ """
+ res = []
+ gripper = create_gripper(gripper_name)
+ if trimesh.ray.has_embree:
+ intersector = trimesh.ray.ray_pyembree.RayMeshIntersector(
+ object_mesh, scale_to_box=True)
+ else:
+ intersector = trimesh.ray.ray_triangle.RayMeshIntersector(object_mesh)
+ for p, colliding, outcome in tqdm(zip(transforms, collisions, successfuls), total=len(transforms), disable=silent):
+ contact_dict = {}
+ contact_dict['collisions'] = 0
+ contact_dict['valid_locations'] = 0
+ contact_dict['successful'] = outcome
+ contact_dict['grasp_transform'] = p
+ contact_dict['contact_points'] = []
+ contact_dict['contact_directions'] = []
+ contact_dict['contact_face_normals'] = []
+ contact_dict['contact_offsets'] = []
+
+ if colliding:
+ contact_dict['collisions'] = 1
+ else:
+ ray_origins, ray_directions = gripper.get_closing_rays_contact(p)
+
+ locations, index_ray, index_tri = intersector.intersects_location(
+ ray_origins, ray_directions, multiple_hits=False)
+
+ if len(locations) > 0:
+ # this depends on the width of the gripper
+ valid_locations = np.linalg.norm(ray_origins[index_ray]-locations, axis=1) <= 2.0*gripper.q
+
+ if sum(valid_locations) > 1:
+ contact_dict['valid_locations'] = 1
+ contact_dict['contact_points'] = locations[valid_locations]
+ contact_dict['contact_face_normals'] = object_mesh.face_normals[index_tri[valid_locations]]
+ contact_dict['contact_directions'] = ray_directions[index_ray[valid_locations]]
+ contact_dict['contact_offsets'] = np.linalg.norm(ray_origins[index_ray[valid_locations]] - locations[valid_locations], axis=1)
+ # dot_prods = (contact_dict['contact_face_normals'] * contact_dict['contact_directions']).sum(axis=1)
+ # contact_dict['contact_cosine_angles'] = np.cos(dot_prods)
+ res.append(contact_dict)
+
+ return res
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/render_acronym.py b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/render_acronym.py
new file mode 100644
index 0000000000..33b63a9626
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/render_acronym.py
@@ -0,0 +1,375 @@
+# Pre-render all the camera poses and scenes for the acronym dataset
+import os
+import copy
+import random
+import cv2
+import glob
+import numpy as np
+import trimesh.transformations as tra
+from tqdm import tqdm
+from multiprocessing import Pool, cpu_count, current_process,shared_memory
+
+
+from contact_graspnet_pytorch import config_utils, utils
+from contact_graspnet_pytorch.datagen_scene_renderer import SceneRenderer
+
+os.environ['PYOPENGL_PLATFORM'] = 'egl' # To get pyrender to work
+
+class AcronymRender():
+ def __init__(self, global_config):
+ """
+ Class to render acronym dataset
+ """
+ self.data_path = global_config['DATA']['data_path']
+ self.scene_contacts_dir, self.valid_scene_contacts = \
+ self._check_scene_contacts(self.data_path,
+ global_config['DATA']['scene_contacts_path'])
+
+ self.intrinsics=global_config['DATA']['intrinsics']
+ elevation=global_config['DATA']['view_sphere']['elevation']
+
+ self._raw_num_points = global_config['DATA']['raw_num_points']
+ self._use_farthest_point = global_config['DATA']['use_farthest_point']
+ self._distance_range = global_config['DATA']['view_sphere']['distance_range']
+
+ self._depth_augm_config = global_config['DATA']['depth_augm']
+
+
+ self._current_pc = None
+ self._cache = {}
+
+ # self._renderer = SceneRenderer(caching=False, intrinsics=intrinsics)
+
+ elevation = global_config['DATA']['view_sphere']['elevation']
+ self._cam_orientations = []
+ self._elevation = np.array(elevation)/180.
+ for az in np.linspace(0, np.pi * 2, 30):
+ for el in np.linspace(self._elevation[0], self._elevation[1], 30):
+ self._cam_orientations.append(tra.euler_matrix(0, -el, az))
+ self._coordinate_transform = tra.euler_matrix(np.pi/2, 0, 0).dot(tra.euler_matrix(0, np.pi/2, 0))
+
+ self.num_cam_poses = 200
+
+ self._estimate_normals = global_config['DATA']['input_normals']
+ self._return_segmap = False
+
+ def render_acronym(self):
+ """
+ Render all the scenes in the acronym dataset
+ """
+ # for scene_id in tqdm(self.valid_scene_contacts):
+ # self.render_scene(scene_id)
+
+ n_pools = min(10, cpu_count()-2)
+ with Pool(n_pools) as p:
+ list(tqdm(
+ p.imap_unordered(self.render_scene, self.valid_scene_contacts),
+ total=len(self.valid_scene_contacts),
+ position=0))
+ # p.map(self.render_scene, self.valid_scene_contacts)
+
+ # examples = list(
+ # tqdm(
+ # p.imap_unordered(self.render_scene, self.valid_scene_contacts),
+ # total=len(self.valid_scene_contacts)
+ # )
+ # )
+ # with Pool(cpu_count()-2) as p:
+ # examples = list(
+ # tqdm(
+ # p.imap_unordered(self.render_scene, self.valid_scene_contacts),
+ # total=len(self.valid_scene_contacts)
+ # )
+ # )
+
+ def render_scene(self, scene_id):
+ """
+ Render a scene at all poses
+ """
+ # Data is indexed as follows:
+ # For each scene, we have a set of camera poses. The scene index is
+ # index // num_cam_poses, the camera pose index is index % num_cam_poses
+
+
+ # Get lowest id not in use
+ # tqdm_id = random.randint(0, 10)
+ tqdm_id = int(scene_id) + 1
+
+ renderer = SceneRenderer(caching=False, intrinsics=self.intrinsics)
+
+ if self._estimate_normals:
+ raise NotImplementedError
+
+ if self._return_segmap:
+ raise NotImplementedError
+
+ scene_info = np.load(os.path.join(self.scene_contacts_dir, scene_id + '.npz'), allow_pickle=False)
+
+ scene_contact_points = scene_info['scene_contact_points']
+ obj_paths = scene_info['obj_paths']
+ obj_transforms = scene_info['obj_transforms']
+ obj_scales = scene_info['obj_scales']
+ grasp_transforms = scene_info['grasp_transforms']
+
+ render_dir_path = os.path.join(self.data_path, 'renders')
+
+ # TODO: Fix this in the paths
+ # for i in range(obj_paths.shape[0]):
+ # parts = obj_paths[i].split('/')
+ # obj_paths[i] = os.path.join(*parts[0:-2], parts[-1])
+
+ # -- Build and Render Scene -- #
+ obj_paths = [os.path.join(self.data_path, p) for p in obj_paths]
+ # self.change_scene(renderer, obj_paths, obj_scales, obj_transforms, visualize=False)
+ try:
+ self.change_scene(renderer, obj_paths, obj_scales, obj_transforms, visualize=False)
+ except:
+ print('Error loading scene %s' % scene_id)
+ return
+
+ if not os.path.exists(os.path.join(render_dir_path, scene_id)):
+ os.makedirs(os.path.join(render_dir_path, scene_id))
+
+ # -- Check if we already rendered this -- #
+ # Useful for restarting rendering
+ num_elements = len(os.listdir(os.path.join(render_dir_path, scene_id)))
+ if num_elements >= self.num_cam_poses:
+ return
+
+ sampled_ori = random.sample(self._cam_orientations, self.num_cam_poses)
+ pbar = tqdm(sampled_ori, position=tqdm_id)
+ # pbar = self._cam_orientations
+ for i, cam_ori in enumerate(pbar):
+ save_pose_path = os.path.join(render_dir_path, scene_id, f'{i:03}.npz')
+ if os.path.exists(save_pose_path):
+ continue
+ camera_pose = self.get_cam_pose(renderer, cam_ori)
+ pc_cam, pc_normals, camera_pose, depth = self.render_pose(renderer, camera_pose, estimate_normals=self._estimate_normals)
+
+ # Convert from OpenGL to OpenCV Coordinates
+ camera_pose, pc_cam = self._center_pc_convert_cam(camera_pose, pc_cam)
+
+ np.savez_compressed(save_pose_path, pc_cam=pc_cam, camera_pose=camera_pose)
+
+
+ def change_scene(self, renderer, obj_paths, obj_scales, obj_transforms, visualize=False):
+ """
+ Change pyrender scene
+
+ Arguments:
+ obj_paths {list[str]} -- path to CAD models in scene
+ obj_scales {list[float]} -- scales of CAD models
+ obj_transforms {list[np.ndarray]} -- poses of CAD models
+
+ Keyword Arguments:
+ visualize {bool} -- whether to update the visualizer as well (default: {False})
+ """
+ renderer.change_scene(obj_paths, obj_scales, obj_transforms)
+ if visualize:
+ self._visualizer.change_scene(obj_paths, obj_scales, obj_transforms)
+
+
+ def render_pose(self, renderer, camera_pose, estimate_normals=False):
+ """
+ Renders scene depth map, transforms to regularized pointcloud.
+ Note, we do not apply augmentations
+
+ Arguments:
+ camera_pose {[type]} -- camera pose to render the scene from. (default: {None})
+
+ Keyword Arguments:
+ estimate_normals {bool} -- calculate and return normals (default: {False})
+
+ Returns:
+ [pc, pc_normals, camera_pose, depth] -- [point cloud, point cloud normals, camera pose, depth]
+ """
+ in_camera_pose = copy.deepcopy(camera_pose)
+
+ # 0.005 s
+ _, depth, _, camera_pose = renderer.render(in_camera_pose, render_pc=False)
+ depth = self._augment_depth(depth)
+
+ pc = renderer._to_pointcloud(depth)
+ pc = self._regularize_pc_point_count(pc, self._raw_num_points, use_farthest_point=self._use_farthest_point)
+ # pc = self._augment_pc(pc)
+
+ if estimate_normals:
+ raise NotImplementedError
+ else:
+ pc_normals = []
+ # pc_normals = estimate_normals_cam_from_pc(pc[:,:3], raw_num_points=self._raw_num_points) if estimate_normals else []
+
+ return pc, pc_normals, camera_pose, depth
+
+ def _augment_depth(self, depth):
+ """
+ Augments depth map with z-noise and smoothing according to config
+
+ Arguments:
+ depth {np.ndarray} -- depth map
+
+ Returns:
+ np.ndarray -- augmented depth map
+ """
+
+ if 'sigma' in self._depth_augm_config and self._depth_augm_config['sigma'] > 0:
+ clip = self._depth_augm_config['clip']
+ sigma = self._depth_augm_config['sigma']
+ noise = np.clip(sigma*np.random.randn(*depth.shape), -clip, clip)
+ depth += noise
+ if 'gaussian_kernel' in self._depth_augm_config and self._depth_augm_config['gaussian_kernel'] > 0:
+ kernel = self._depth_augm_config['gaussian_kernel']
+ depth_copy = depth.copy()
+ depth = cv2.GaussianBlur(depth,(kernel,kernel),0)
+ depth[depth_copy==0] = depth_copy[depth_copy==0]
+
+ return depth
+
+ def get_cam_pose(self, renderer, cam_orientation):
+ """
+ Samples camera pose on shell around table center
+
+ Arguments:
+ cam_orientation {np.ndarray} -- 3x3 camera orientation matrix
+
+ Returns:
+ [np.ndarray] -- 4x4 homogeneous camera pose
+ """
+
+ distance = self._distance_range[0] + np.random.rand()*(self._distance_range[1]-self._distance_range[0])
+
+ extrinsics = np.eye(4)
+ extrinsics[0, 3] += distance
+ extrinsics = cam_orientation.dot(extrinsics)
+
+ cam_pose = extrinsics.dot(self._coordinate_transform)
+ # table height
+ cam_pose[2,3] += renderer._table_dims[2]
+ cam_pose[:3,:2]= -cam_pose[:3,:2]
+ return cam_pose
+
+ def _check_scene_contacts(self, dataset_folder, scene_contacts_path='scene_contacts_new'):
+ """
+ Attempt to load each scene contact in the scene_contacts_path directory.
+ Returns a list of all valid paths
+
+ Arguments:
+ dataset_folder {str} -- path to dataset folder
+ scene_contacts_path {str} -- path to scene contacts
+
+ Returns:
+ list(str) -- list of valid scene contact paths
+ """
+ scene_contacts_dir = os.path.join(dataset_folder, scene_contacts_path)
+ scene_contacts_paths = sorted(glob.glob(os.path.join(scene_contacts_dir, '*')))
+ valid_scene_contacts = []
+
+ # Faster
+ corrupt_list = [
+ # Won't load
+ '001780',
+ '002100',
+ '002486',
+ '004861',
+ '004866',
+ '007176',
+ '008283',
+
+ # Newer won't load
+ '005723'
+ '001993',
+ '001165',
+ '006828',
+ '007238',
+ '008980',
+
+ # No contacts
+ # '008509',
+ # '007996',
+ # '009296',
+
+ # '001439',
+ # '006090',
+ # '002731',
+ # '001519',
+ ]
+
+
+ # Just check if each scene is in the bad list
+ for contact_path in scene_contacts_paths:
+ if contact_path.split('/')[-1].split('.')[0] in corrupt_list:
+ continue
+ contact_id = contact_path.split('/')[-1].split('.')[0]
+ valid_scene_contacts.append(contact_id)
+ return scene_contacts_dir, valid_scene_contacts
+
+ @staticmethod
+ def _regularize_pc_point_count(pc, npoints, use_farthest_point=False):
+ """
+ If point cloud pc has less points than npoints, it oversamples.
+ Otherwise, it downsample the input pc to have npoint points.
+ use_farthest_point: indicates
+
+ :param pc: Nx3 point cloud
+ :param npoints: number of points the regularized point cloud should have
+ :param use_farthest_point: use farthest point sampling to downsample the points, runs slower.
+ :returns: npointsx3 regularized point cloud
+ """
+
+ def distance_by_translation_point(p1, p2):
+ """
+ Gets two nx3 points and computes the distance between point p1 and p2.
+ """
+ return np.sqrt(np.sum(np.square(p1 - p2), axis=-1))
+
+ if pc.shape[0] > npoints:
+ if use_farthest_point:
+ _, center_indexes = utils.farthest_points(pc, npoints, distance_by_translation_point, return_center_indexes=True)
+ else:
+ center_indexes = np.random.choice(range(pc.shape[0]), size=npoints, replace=False)
+ pc = pc[center_indexes, :]
+ else:
+ required = npoints - pc.shape[0]
+ if required > 0:
+ index = np.random.choice(range(pc.shape[0]), size=required)
+ pc = np.concatenate((pc, pc[index, :]), axis=0)
+ return pc
+
+ @staticmethod
+ def _center_pc_convert_cam(cam_pose, point_clouds):
+ """
+ Converts from OpenGL to OpenCV coordinates, computes inverse of camera pose and centers point cloud
+
+ :param cam_poses: (bx4x4) Camera poses in OpenGL format
+ :param batch_data: (bxNx3) point clouds
+ :returns: (cam_poses, batch_data) converted
+ """
+
+ # OpenCV OpenGL conversion
+ cam_pose[:3, 1] = -cam_pose[:3, 1]
+ cam_pose[:3, 2] = -cam_pose[:3, 2]
+ cam_pose = utils.inverse_transform(cam_pose)
+
+ pc_mean = np.mean(point_clouds, axis=0, keepdims=True)
+ point_clouds[:,:3] -= pc_mean[:,:3]
+ cam_pose[:3,3] -= pc_mean[0,:3]
+
+ return cam_pose, point_clouds
+
+
+
+
+
+if __name__ == '__main__':
+
+ ckpt_dir = 'checkpoints/contact_graspnet'
+ forward_passes = 1
+ arg_configs = []
+
+ # config_dir = 'contact_graspnet_pytorch'
+ global_config = config_utils.load_config(ckpt_dir, batch_size=forward_passes, arg_configs=arg_configs)
+
+ render = AcronymRender(global_config)
+ render.render_acronym()
+
+ # render.render_scene('000001')
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/render_acronym_multicore.py b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/render_acronym_multicore.py
new file mode 100644
index 0000000000..410abdea0a
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/render_acronym_multicore.py
@@ -0,0 +1,378 @@
+# NO LONGER USED
+
+
+# Pre-render all the camera poses and scenes for the acronym dataset
+import os
+import copy
+import random
+import glob
+import numpy as np
+import trimesh.transformations as tra
+from tqdm import tqdm
+from multiprocessing import Pool, cpu_count, current_process,shared_memory
+
+
+from contact_graspnet_pytorch import config_utils, utils
+from contact_graspnet_pytorch.datagen_scene_renderer import SceneRenderer
+
+os.environ['PYOPENGL_PLATFORM'] = 'egl' # To get pyrender to work
+
+class AcronymRenderScene():
+ def __init__(self, global_config):
+ """
+ Class to render acronym dataset
+ """
+ self.data_path = global_config['DATA']['data_path']
+ self.scene_contacts_dir, self.valid_scene_contacts = \
+ self._check_scene_contacts(self.data_path,
+ global_config['DATA']['scene_contacts_path'])
+
+ self.intrinsics=global_config['DATA']['intrinsics']
+ elevation=global_config['DATA']['view_sphere']['elevation']
+
+ self._raw_num_points = global_config['DATA']['raw_num_points']
+ self._use_farthest_point = global_config['DATA']['use_farthest_point']
+ self._distance_range = global_config['DATA']['view_sphere']['distance_range']
+
+
+ self._current_pc = None
+ self._cache = {}
+
+ # self._renderer = SceneRenderer(caching=False, intrinsics=intrinsics)
+
+ elevation = global_config['DATA']['view_sphere']['elevation']
+ self._cam_orientations = []
+ self._elevation = np.array(elevation)/180.
+ for az in np.linspace(0, np.pi * 2, 30):
+ for el in np.linspace(self._elevation[0], self._elevation[1], 30):
+ self._cam_orientations.append(tra.euler_matrix(0, -el, az))
+ self._coordinate_transform = tra.euler_matrix(np.pi/2, 0, 0).dot(tra.euler_matrix(0, np.pi/2, 0))
+
+ self.num_cam_poses = len(self._cam_orientations)
+
+ self._estimate_normals = global_config['DATA']['input_normals']
+ self._return_segmap = False
+
+
+ def render_scene(self, scene_id):
+ """
+ Render a scene at all poses
+ """
+ # Data is indexed as follows:
+ # For each scene, we have a set of camera poses. The scene index is
+ # index // num_cam_poses, the camera pose index is index % num_cam_poses
+
+
+ # Get lowest id not in use
+ tqdm_id = int(scene_id) + 1
+
+ renderer = SceneRenderer(caching=False, intrinsics=self.intrinsics)
+
+ if self._estimate_normals:
+ raise NotImplementedError
+
+ if self._return_segmap:
+ raise NotImplementedError
+
+ scene_info = np.load(os.path.join(self.scene_contacts_dir, scene_id + '.npz'), allow_pickle=False)
+
+ scene_contact_points = scene_info['scene_contact_points']
+ obj_paths = scene_info['obj_paths']
+ obj_transforms = scene_info['obj_transforms']
+ obj_scales = scene_info['obj_scales']
+ grasp_transforms = scene_info['grasp_transforms']
+
+ render_dir_path = os.path.join(self.data_path, 'renders')
+
+ # TODO: Fix this in the paths
+ # for i in range(obj_paths.shape[0]):
+ # parts = obj_paths[i].split('/')
+ # obj_paths[i] = os.path.join(*parts[0:-2], parts[-1])
+
+ # -- Build and Render Scene -- #
+ obj_paths = [os.path.join(self.data_path, p) for p in obj_paths]
+ self.change_scene(renderer, obj_paths, obj_scales, obj_transforms, visualize=False)
+ # try:
+ # self.change_scene(renderer, obj_paths, obj_scales, obj_transforms, visualize=False)
+ # except:
+ # print('Error loading scene %s' % scene_id)
+ # return
+
+ if not os.path.exists(os.path.join(render_dir_path, scene_id)):
+ os.makedirs(os.path.join(render_dir_path, scene_id))
+ pbar = tqdm(self._cam_orientations, position=tqdm_id)
+ # pbar = self._cam_orientations
+ for i, cam_ori in enumerate(pbar):
+ save_pose_path = os.path.join(render_dir_path, scene_id, f'{i:03}.npz')
+ if os.path.exists(save_pose_path):
+ continue
+ camera_pose = self.get_cam_pose(renderer, cam_ori)
+ pc_cam, pc_normals, camera_pose, depth = self.render_pose(renderer, camera_pose, estimate_normals=self._estimate_normals)
+
+ # Convert from OpenGL to OpenCV Coordinates
+ camera_pose, pc_cam = self._center_pc_convert_cam(camera_pose, pc_cam)
+
+ np.savez(save_pose_path, pc_cam=pc_cam, camera_pose=camera_pose)
+
+
+ def change_scene(self, renderer, obj_paths, obj_scales, obj_transforms, visualize=False):
+ """
+ Change pyrender scene
+
+ Arguments:
+ obj_paths {list[str]} -- path to CAD models in scene
+ obj_scales {list[float]} -- scales of CAD models
+ obj_transforms {list[np.ndarray]} -- poses of CAD models
+
+ Keyword Arguments:
+ visualize {bool} -- whether to update the visualizer as well (default: {False})
+ """
+ renderer.change_scene(obj_paths, obj_scales, obj_transforms)
+ if visualize:
+ self._visualizer.change_scene(obj_paths, obj_scales, obj_transforms)
+
+
+ def render_pose(self, renderer, camera_pose, estimate_normals=False):
+ """
+ Renders scene depth map, transforms to regularized pointcloud.
+ Note, we do not apply augmentations
+
+ Arguments:
+ camera_pose {[type]} -- camera pose to render the scene from. (default: {None})
+
+ Keyword Arguments:
+ estimate_normals {bool} -- calculate and return normals (default: {False})
+
+ Returns:
+ [pc, pc_normals, camera_pose, depth] -- [point cloud, point cloud normals, camera pose, depth]
+ """
+ in_camera_pose = copy.deepcopy(camera_pose)
+
+ # 0.005 s
+ _, depth, _, camera_pose = renderer.render(in_camera_pose, render_pc=False)
+ # depth = self._augment_depth(depth)
+
+ pc = renderer._to_pointcloud(depth)
+ pc = self._regularize_pc_point_count(pc, self._raw_num_points, use_farthest_point=self._use_farthest_point)
+ # pc = self._augment_pc(pc)
+
+ if estimate_normals:
+ raise NotImplementedError
+ else:
+ pc_normals = []
+ # pc_normals = estimate_normals_cam_from_pc(pc[:,:3], raw_num_points=self._raw_num_points) if estimate_normals else []
+
+ return pc, pc_normals, camera_pose, depth
+
+ def get_cam_pose(self, renderer, cam_orientation):
+ """
+ Samples camera pose on shell around table center
+
+ Arguments:
+ cam_orientation {np.ndarray} -- 3x3 camera orientation matrix
+
+ Returns:
+ [np.ndarray] -- 4x4 homogeneous camera pose
+ """
+
+ distance = self._distance_range[0] + np.random.rand()*(self._distance_range[1]-self._distance_range[0])
+
+ extrinsics = np.eye(4)
+ extrinsics[0, 3] += distance
+ extrinsics = cam_orientation.dot(extrinsics)
+
+ cam_pose = extrinsics.dot(self._coordinate_transform)
+ # table height
+ cam_pose[2,3] += renderer._table_dims[2]
+ cam_pose[:3,:2]= -cam_pose[:3,:2]
+ return cam_pose
+
+ def _check_scene_contacts(self, dataset_folder, scene_contacts_path='scene_contacts_new'):
+ """
+ Attempt to load each scene contact in the scene_contacts_path directory.
+ Returns a list of all valid paths
+
+ Arguments:
+ dataset_folder {str} -- path to dataset folder
+ scene_contacts_path {str} -- path to scene contacts
+
+ Returns:
+ list(str) -- list of valid scene contact paths
+ """
+ scene_contacts_dir = os.path.join(dataset_folder, scene_contacts_path)
+ scene_contacts_paths = sorted(glob.glob(os.path.join(scene_contacts_dir, '*')))
+ valid_scene_contacts = []
+
+ # Faster
+ corrupt_list = [
+ # Won't load
+ '001780',
+ '002100',
+ '002486',
+ '004861',
+ '004866',
+ '007176',
+ '008283',
+
+ # No contacts
+ '008509',
+ '007996',
+ '009296',
+
+ # '001439',
+ # '006090',
+ # '002731',
+ # '001519',
+ ]
+
+
+ # Just check if each scene is in the bad list
+ for contact_path in scene_contacts_paths:
+ if contact_path.split('/')[-1].split('.')[0] in corrupt_list:
+ continue
+ contact_id = contact_path.split('/')[-1].split('.')[0]
+ valid_scene_contacts.append(contact_id)
+ return scene_contacts_dir, valid_scene_contacts
+
+ @staticmethod
+ def _regularize_pc_point_count(pc, npoints, use_farthest_point=False):
+ """
+ If point cloud pc has less points than npoints, it oversamples.
+ Otherwise, it downsample the input pc to have npoint points.
+ use_farthest_point: indicates
+
+ :param pc: Nx3 point cloud
+ :param npoints: number of points the regularized point cloud should have
+ :param use_farthest_point: use farthest point sampling to downsample the points, runs slower.
+ :returns: npointsx3 regularized point cloud
+ """
+
+ def distance_by_translation_point(p1, p2):
+ """
+ Gets two nx3 points and computes the distance between point p1 and p2.
+ """
+ return np.sqrt(np.sum(np.square(p1 - p2), axis=-1))
+
+ if pc.shape[0] > npoints:
+ if use_farthest_point:
+ _, center_indexes = utils.farthest_points(pc, npoints, distance_by_translation_point, return_center_indexes=True)
+ else:
+ center_indexes = np.random.choice(range(pc.shape[0]), size=npoints, replace=False)
+ pc = pc[center_indexes, :]
+ else:
+ required = npoints - pc.shape[0]
+ if required > 0:
+ index = np.random.choice(range(pc.shape[0]), size=required)
+ pc = np.concatenate((pc, pc[index, :]), axis=0)
+ return pc
+
+ @staticmethod
+ def _center_pc_convert_cam(cam_pose, point_clouds):
+ """
+ Converts from OpenGL to OpenCV coordinates, computes inverse of camera pose and centers point cloud
+
+ :param cam_poses: (bx4x4) Camera poses in OpenGL format
+ :param batch_data: (bxNx3) point clouds
+ :returns: (cam_poses, batch_data) converted
+ """
+
+ # OpenCV OpenGL conversion
+ cam_pose[:3, 1] = -cam_pose[:3, 1]
+ cam_pose[:3, 2] = -cam_pose[:3, 2]
+ cam_pose = utils.inverse_transform(cam_pose)
+
+ pc_mean = np.mean(point_clouds, axis=0, keepdims=True)
+ point_clouds[:,:3] -= pc_mean[:,:3]
+ cam_pose[:3,3] -= pc_mean[0,:3]
+
+ return cam_pose, point_clouds
+
+
+# -- Multiprocessing rendering functions -- #
+
+def render_scene(data):
+ scene_id, global_config = data
+ acronym_render = AcronymRenderScene(global_config)
+ acronym_render.render_scene(scene_id)
+
+def check_scene_contacts(dataset_folder, scene_contacts_path='scene_contacts_new'):
+ """
+ Attempt to load each scene contact in the scene_contacts_path directory.
+ Returns a list of all valid paths
+
+ Arguments:
+ dataset_folder {str} -- path to dataset folder
+ scene_contacts_path {str} -- path to scene contacts
+
+ Returns:
+ list(str) -- list of valid scene contact paths
+ """
+ scene_contacts_dir = os.path.join(dataset_folder, scene_contacts_path)
+ scene_contacts_paths = sorted(glob.glob(os.path.join(scene_contacts_dir, '*')))
+ valid_scene_contacts = []
+
+ # Faster
+ corrupt_list = [
+ # Won't load
+ '001780',
+ '002100',
+ '002486',
+ '004861',
+ '004866',
+ '007176',
+ '008283',
+
+ # No contacts
+ '008509',
+ '007996',
+ '009296',
+
+ # '001439',
+ # '006090',
+ # '002731',
+ # '001519',
+ ]
+
+
+ # Just check if each scene is in the bad list
+ for contact_path in scene_contacts_paths:
+ if contact_path.split('/')[-1].split('.')[0] in corrupt_list:
+ continue
+ contact_id = contact_path.split('/')[-1].split('.')[0]
+ valid_scene_contacts.append(contact_id)
+ return scene_contacts_dir, valid_scene_contacts
+
+def render_acronym(global_config):
+ """
+ Render all the scenes in the acronym dataset
+ """
+ # for scene_id in tqdm(self.valid_scene_contacts):
+ # self.render_scene(scene_id)
+
+ data_path = global_config['DATA']['data_path']
+ scene_contacts_dir, valid_scene_contacts = check_scene_contacts(data_path,
+ global_config['DATA']['scene_contacts_path'])
+
+ data_list = [(id, global_config) for id in valid_scene_contacts]
+
+ n_pools = min(10, cpu_count()-2)
+ with Pool(n_pools) as p:
+ list(tqdm(
+ p.imap_unordered(render_scene, data_list),
+ total=len(valid_scene_contacts),
+ position=0))
+
+
+if __name__ == '__main__':
+
+ ckpt_dir = 'checkpoints/contact_graspnet'
+ forward_passes = 1
+ arg_configs = []
+
+ # config_dir = 'contact_graspnet_pytorch'
+ global_config = config_utils.load_config(ckpt_dir, batch_size=forward_passes, arg_configs=arg_configs)
+
+ render_acronym(global_config)
+
+
+ # render.render_scene('000001')
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/scene_renderer.py b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/scene_renderer.py
new file mode 100644
index 0000000000..1a34e57c69
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/scene_renderer.py
@@ -0,0 +1,293 @@
+from __future__ import print_function
+from __future__ import absolute_import
+
+import numpy as np
+import copy
+import cv2
+import math
+import os
+from copy import deepcopy
+import multiprocessing as mp
+import pyrender
+import trimesh
+import trimesh.transformations as tra
+
+from contact_graspnet_pytorch.mesh_utils import Object
+
+class SceneRenderer:
+ def __init__(self, intrinsics=None, fov=np.pi / 6, caching=True, viewing_mode=False):
+ """Renders depth with given intrinsics during training.
+
+ Keyword Arguments:
+ intrinsics {str} -- camera name from 'kinect_azure', 'realsense' (default: {None})
+ fov {float} -- field of view, ignored if inrinsics is not None (default: {np.pi/6})
+ caching {bool} -- whether to cache object meshes (default: {True})
+ viewing_mode {bool} -- visualize scene (default: {False})
+ """
+
+ self._fov = fov
+
+ self._scene = pyrender.Scene()
+ self._table_dims = [1.0, 1.2, 0.6]
+ self._table_pose = np.eye(4)
+ self._viewer = viewing_mode
+ if viewing_mode:
+ self._viewer = pyrender.Viewer(
+ self._scene,
+ use_raymond_lighting=True,
+ run_in_thread=True)
+
+ self._intrinsics = intrinsics
+ if self._intrinsics == 'realsense':
+ self._fx=616.36529541
+ self._fy=616.20294189
+ self._cx=310.25881958
+ self._cy=236.59980774
+ self._znear=0.04
+ self._zfar=20
+ self._height=480
+ self._width=640
+ elif self._intrinsics == 'kinect_azure':
+ self._fx=631.54864502
+ self._fy=631.20751953
+ self._cx=638.43517329
+ self._cy=366.49904066
+ self._znear=0.04
+ self._zfar=20
+ self._height=720
+ self._width=1280
+
+ self._add_table_node()
+ self._init_camera_renderer()
+
+ self._current_context = None
+ self._cache = {} if caching else None
+ self._caching = caching
+
+ def _init_camera_renderer(self):
+ """
+ If not in visualizing mode, initialize camera with given intrinsics
+ """
+
+ if self._viewer:
+ return
+
+ if self._intrinsics in ['kinect_azure', 'realsense']:
+ camera = pyrender.IntrinsicsCamera(self._fx, self._fy, self._cx, self._cy, self._znear, self._zfar)
+ self._camera_node = self._scene.add(camera, pose=np.eye(4), name='camera')
+ self.renderer = pyrender.OffscreenRenderer(viewport_width=self._width,
+ viewport_height=self._height,
+ point_size=1.0)
+ else:
+ camera = pyrender.PerspectiveCamera(yfov=self._fov, aspectRatio=1.0, znear=0.001) # do not change aspect ratio
+ self._camera_node = self._scene.add(camera, pose=tra.euler_matrix(np.pi, 0, 0), name='camera')
+ self.renderer = pyrender.OffscreenRenderer(400, 400)
+
+
+ def _add_table_node(self):
+ """
+ Adds table mesh and sets pose
+ """
+ if self._viewer:
+ return
+ table_mesh = trimesh.creation.box(self._table_dims)
+ mesh = pyrender.Mesh.from_trimesh(table_mesh)
+
+ table_node = pyrender.Node(mesh=mesh, name='table')
+ self._scene.add_node(table_node)
+ self._scene.set_pose(table_node, self._table_pose)
+
+
+ def _load_object(self, path, scale):
+ """
+ Load a mesh, scale and center it
+
+ Arguments:
+ path {str} -- path to mesh
+ scale {float} -- scale of the mesh
+
+ Returns:
+ dict -- contex with loaded mesh info
+ """
+ if (path, scale) in self._cache:
+ return self._cache[(path, scale)]
+ obj = Object(path)
+ obj.rescale(scale)
+
+ tmesh = obj.mesh
+ tmesh_mean = np.mean(tmesh.vertices, 0)
+ tmesh.vertices -= np.expand_dims(tmesh_mean, 0)
+
+ lbs = np.min(tmesh.vertices, 0)
+ ubs = np.max(tmesh.vertices, 0)
+ object_distance = np.max(ubs - lbs) * 5
+
+ mesh = pyrender.Mesh.from_trimesh(tmesh)
+
+ context = {
+ 'name': path + '_' + str(scale),
+ 'tmesh': copy.deepcopy(tmesh),
+ 'distance': object_distance,
+ 'node': pyrender.Node(mesh=mesh, name=path + '_' + str(scale)),
+ 'mesh_mean': np.expand_dims(tmesh_mean, 0),
+ }
+
+ self._cache[(path, scale)] = context
+
+ return self._cache[(path, scale)]
+
+ def change_scene(self, obj_paths, obj_scales, obj_transforms):
+ """Remove current objects and add new ones to the scene
+
+ Arguments:
+ obj_paths {list} -- list of object mesh paths
+ obj_scales {list} -- list of object scales
+ obj_transforms {list} -- list of object transforms
+ """
+ if self._viewer:
+ self._viewer.render_lock.acquire()
+ for n in self._scene.get_nodes():
+ if n.name not in ['table', 'camera', 'parent']:
+ self._scene.remove_node(n)
+
+ if not self._caching:
+ self._cache = {}
+
+ for p,t,s in zip(obj_paths, obj_transforms, obj_scales):
+
+ object_context = self._load_object(p, s)
+ object_context = deepcopy(object_context)
+
+ self._scene.add_node(object_context['node'])
+ self._scene.set_pose(object_context['node'], t)
+
+ if self._viewer:
+ self._viewer.render_lock.release()
+
+ def _to_pointcloud(self, depth):
+ """Convert depth map to point cloud
+
+ Arguments:
+ depth {np.ndarray} -- HxW depth map
+
+ Returns:
+ np.ndarray -- Nx4 homog. point cloud
+ """
+ if self._intrinsics in ['kinect_azure', 'realsense']:
+ fy = self._fy
+ fx = self._fx
+ height = self._height
+ width = self._width
+ cx = self._cx
+ cy = self._cy
+
+ mask = np.where(depth > 0)
+
+ x = mask[1]
+ y = mask[0]
+
+ normalized_x = (x.astype(np.float32) - cx)
+ normalized_y = (y.astype(np.float32) - cy)
+ else:
+ fy = fx = 0.5 / np.tan(self._fov * 0.5) # aspectRatio is one.
+ height = depth.shape[0]
+ width = depth.shape[1]
+
+ mask = np.where(depth > 0)
+
+ x = mask[1]
+ y = mask[0]
+
+ normalized_x = (x.astype(np.float32) - width * 0.5) / width
+ normalized_y = (y.astype(np.float32) - height * 0.5) / height
+
+ world_x = normalized_x * depth[y, x] / fx
+ world_y = normalized_y * depth[y, x] / fy
+ world_z = depth[y, x]
+ ones = np.ones(world_z.shape[0], dtype=np.float32)
+
+ return np.vstack((world_x, world_y, world_z, ones)).T
+
+
+ def render(self, pose, render_pc=True):
+ """Render object or scene in camera pose
+
+ Arguments:
+ pose {np.ndarray} -- 4x4 camera pose
+
+ Keyword Arguments:
+ render_pc {bool} -- whether to convert depth map to point cloud (default: {True})
+
+ Returns:
+ [np.ndarray, np.ndarray, np.ndarray, np.ndarray] -- HxWx3 color, HxW depth, Nx4 point cloud, 4x4 camera pose
+ """
+
+ transferred_pose = pose.copy()
+ self._scene.set_pose(self._camera_node, transferred_pose)
+
+ color, depth = self.renderer.render(self._scene)
+
+ if render_pc:
+ pc = self._to_pointcloud(depth)
+ else:
+ pc = None
+
+ return color, depth, pc, transferred_pose
+
+ def render_labels(self, full_depth, obj_paths, obj_scales, render_pc=False):
+ """Render instance segmentation map
+
+ Arguments:
+ full_depth {np.ndarray} -- HxW depth map
+ obj_paths {list} -- list of object paths in scene
+ obj_scales {list} -- list of object scales in scene
+
+ Keyword Arguments:
+ render_pc {bool} -- whether to return object-wise point clouds (default: {False})
+
+ Returns:
+ [np.ndarray, list, dict] -- integer segmap with 0=background, list of
+ corresponding object names, dict of corresponding point clouds
+ """
+
+ scene_object_nodes = []
+ for n in self._scene.get_nodes():
+ if n.name not in ['camera', 'parent']:
+ n.mesh.is_visible = False
+ if n.name != 'table':
+ scene_object_nodes.append(n)
+
+ obj_names = [path + '_' + str(scale) for path, scale in zip(obj_paths,obj_scales)]
+
+
+ pcs = {}
+ output = np.zeros(full_depth.shape, np.uint8)
+ for n in scene_object_nodes:
+ n.mesh.is_visible = True
+
+ depth = self.renderer.render(self._scene)[1]
+ mask = np.logical_and(
+ (np.abs(depth - full_depth) < 1e-6), np.abs(full_depth) > 0
+ )
+ if not np.any(mask):
+ continue
+ if np.any(output[mask] != 0):
+ raise ValueError('wrong label')
+
+ indices = [i+1 for i, x in enumerate(obj_names) if x == n.name]
+ for i in indices:
+ if not np.any(output==i):
+ print('')
+ output[mask] = i
+ break
+
+ n.mesh.is_visible = False
+
+ if render_pc:
+ pcs[i] = self._to_pointcloud(depth*mask)
+
+ for n in self._scene.get_nodes():
+ if n.name not in ['camera', 'parent']:
+ n.mesh.is_visible = True
+
+ return output, ['BACKGROUND'] + obj_names, pcs
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/summaries.py b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/summaries.py
new file mode 100644
index 0000000000..cb6fadb18c
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/summaries.py
@@ -0,0 +1,127 @@
+
+import numpy as np
+import os
+
+try:
+ import tensorflow.compat.v1 as tf
+ tf.disable_eager_execution()
+ TF2 = True
+except:
+ import tensorflow as tf
+ TF2 = False
+
+from tensorboard import summary as summary_lib
+
+
+def top_grasp_acc_summaries(ops, thres=[0.62,0.65,0.68]):
+ """
+ Calculates the accuracy of grasp contact predictions at different thresholds
+
+ Arguments:
+ ops {dict} -- dict of tf tensors
+
+ Keyword Arguments:
+ thres {list} -- thresholds to report (default: {[0.62,0.65,0.68]})
+
+ Returns:
+ [tf.variable] -- summary update ops
+ """
+
+ binary_seg = tf.reshape(ops['binary_seg_pred'],[ops['binary_seg_pred'].shape[0],ops['binary_seg_pred'].shape[1]])
+ max_conf_idcs = tf.argmax(binary_seg, axis=1)
+ max_confs = tf.math.reduce_max(binary_seg, axis=1)
+
+ row_idcs = tf.cast(tf.range(ops['grasp_suc_labels_pc'].shape[0]), tf.int64)
+ nd_gather_idcs = tf.stack((row_idcs, max_conf_idcs), axis=1)
+ top_labels = tf.gather_nd(ops['grasp_suc_labels_pc'], nd_gather_idcs)
+
+ acc_update_ops = [0] * len(thres)
+ accs = [0] * len(thres)
+ for i,thr in enumerate(thres):
+ accs[i], acc_update_ops[i] = tf.metrics.accuracy(top_labels, tf.greater(max_confs, tf.constant(thr)))
+ tf.summary.scalar('Top Grasp Accuracy Thres %s' % thr, accs[i])
+
+ return acc_update_ops
+
+def build_summary_ops(ops, sess, global_config):
+ """
+ Collect tensorboard summaries
+
+ Arguments:
+ ops {dict} -- dict of tf tensors
+ sess {tf.Session} -- tf session
+ global_config {dict} -- global config
+
+ Returns:
+ [dict] -- summary update ops
+ """
+
+ dir_loss = ops['dir_loss']
+ bin_ce_loss = ops['bin_ce_loss']
+ grasp_suc_labels_pc = ops['grasp_suc_labels_pc']
+ binary_seg_pred = ops['binary_seg_pred']
+
+ tf.summary.scalar('total loss', ops['loss'])
+ tf.summary.scalar('dir_loss', ops['dir_loss'])
+ tf.summary.scalar('approach_loss', ops['approach_loss'])
+ tf.summary.scalar('adds_loss', ops['adds_loss'])
+ tf.summary.scalar('adds_gt2pred_loss', ops['adds_gt2pred_loss'])
+ tf.summary.scalar('bin_ce_loss', ops['bin_ce_loss'])
+ tf.summary.scalar('offset_loss_x%s' % global_config['OPTIMIZER']['offset_loss_weight'], ops['offset_loss'])
+
+ tf.summary.histogram('labels_grasp_suc', ops['grasp_suc_labels_pc'])
+ tf.summary.histogram('preds_grasp_suc', ops['binary_seg_pred'])
+
+ tf.summary.histogram('offset_predictions', ops['grasp_offset_pred'])
+ tf.summary.histogram('offset_labels', ops['offset_labels_pc'])
+
+ # classification:
+ if global_config['MODEL']['bin_offsets']:
+
+ tf.summary.histogram('offset_predictions_binned', ops['offset_pred_idcs_pc'])
+ tf.summary.histogram('offset_labels_binned', ops['offset_label_idcs_pc'])
+ tf.summary.scalar('offset_bin_classification_accuracy', tf.reduce_mean(tf.cast(tf.equal(ops['offset_pred_idcs_pc'], ops['offset_label_idcs_pc']),tf.float32)))
+
+ bin_mean_preds = ops['offset_bin_pred_vals']
+ tf.summary.scalar('offset_mean_abs_error', tf.reduce_mean(tf.abs(bin_mean_preds - tf.layers.flatten(tf.abs(ops['offset_orig_labels_vals'])))))
+ tf.summary.scalar('offset_max_abs_error', tf.reduce_max(tf.abs(bin_mean_preds - tf.layers.flatten(tf.abs(ops['offset_orig_labels_vals'])))))
+
+ merged = tf.summary.merge_all()
+
+ acc_update_ops = top_grasp_acc_summaries(ops, thres=[0.62,0.65,0.68])
+
+ auc, auc_update_op = tf.metrics.auc(tf.layers.flatten(tf.cast(ops['grasp_suc_labels_pc'], tf.bool)),
+ tf.layers.flatten(ops['binary_seg_pred']), curve='PR', summation_method='careful_interpolation')
+ tf.summary.scalar('AUCPR', auc)
+
+ pr_curve_streaming_func = summary_lib.v1.pr_curve_streaming_op if TF2 else summary_lib.pr_curve_streaming_op
+ _, pr_update_op = pr_curve_streaming_func(name='pr_contact_success',
+ predictions=tf.layers.flatten(ops['binary_seg_pred']),
+ labels=tf.layers.flatten(tf.cast(ops['grasp_suc_labels_pc'], tf.bool)),
+ num_thresholds = 201)
+
+ pr_reset_op = tf.variables_initializer(tf.get_collection(tf.GraphKeys.METRIC_VARIABLES))
+ merged_eval = tf.summary.merge_all()
+
+ sess.run(tf.local_variables_initializer())
+
+ summary_ops = {'merged': merged, 'merged_eval': merged_eval, 'pr_update_op':pr_update_op, 'auc_update_op':auc_update_op, 'acc_update_ops':acc_update_ops, 'pr_reset_op': pr_reset_op}
+ return summary_ops
+
+def build_file_writers(sess, log_dir):
+ """
+ Create TF FileWriters for train and test
+
+ Arguments:
+ sess {tf.Session} -- tf session
+ log_dir {str} -- Checkpoint directory
+
+ Returns:
+ [dict] -- tf.summary.FileWriter
+ """
+
+ train_writer = tf.summary.FileWriter(os.path.join(log_dir, 'train'))#, sess.graph) creates large event files!
+ test_writer = tf.summary.FileWriter(os.path.join(log_dir, 'test'))#, sess.graph)
+ writers ={'train_writer': train_writer, 'test_writer': test_writer}
+
+ return writers
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/train.py b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/train.py
new file mode 100644
index 0000000000..23cb0c69ae
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/train.py
@@ -0,0 +1,197 @@
+from genericpath import exists
+import os
+import sys
+import argparse
+from datetime import datetime
+import numpy as np
+import time
+from tqdm import tqdm
+from tensorboardX import SummaryWriter
+
+import torch
+
+os.environ['PYOPENGL_PLATFORM'] = 'egl' # To get pyrender to work headless
+
+# Import pointnet library
+CONTACT_DIR = os.path.dirname(os.path.abspath(__file__))
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+ROOT_DIR = os.path.dirname(os.path.dirname(BASE_DIR))
+
+sys.path.append(os.path.join(BASE_DIR))
+sys.path.append(os.path.join(BASE_DIR, 'Pointnet_Pointnet2_pytorch'))
+
+import config_utils
+from acronym_dataloader import AcryonymDataset
+from contact_graspnet_pytorch.contact_graspnet import ContactGraspnet, ContactGraspnetLoss
+from contact_graspnet_pytorch import utils
+from contact_graspnet_pytorch.checkpoints import CheckpointIO
+
+def train(global_config, log_dir):
+ """
+ Trains Contact-GraspNet. Configure the training process by modifying the
+ config.yaml file.
+
+ Arguments:
+ global_config {dict} -- config dict
+ log_dir {str} -- Checkpoint directory
+
+ """
+ device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
+ # device = torch.device('cpu')
+ batch_size = global_config['OPTIMIZER']['batch_size']
+ num_workers = 0 # Increase after debug
+ train_dataset = AcryonymDataset(global_config, train=True, device=device, use_saved_renders=True)
+ test_dataset = AcryonymDataset(global_config, train=False, device=device, use_saved_renders=True)
+
+ train_dataloader = torch.utils.data.DataLoader(train_dataset,
+ batch_size=batch_size,
+ shuffle=True,
+ num_workers=num_workers)
+ test_dataloader = torch.utils.data.DataLoader(test_dataset,
+ batch_size=batch_size,
+ shuffle=True,
+ num_workers=num_workers)
+
+ grasp_estimator = ContactGraspnet(global_config, device).to(device)
+ loss_fn = ContactGraspnetLoss(global_config, device).to(device)
+ opt = torch.optim.Adam(grasp_estimator.parameters(),
+ lr=global_config['OPTIMIZER']['learning_rate'])
+
+ logger = SummaryWriter(os.path.join(log_dir, 'logs'))
+ checkpoint_dir = os.path.join(log_dir, 'checkpoints')
+ checkpoint_io = CheckpointIO(checkpoint_dir, model=grasp_estimator, opt=opt)
+
+ try:
+ load_dict = checkpoint_io.load('model.pt')
+ except FileExistsError:
+ load_dict = dict()
+
+ cur_epoch = load_dict.get('epoch_it', 0)
+ it = load_dict.get('it', 0)
+ metric_val_best = load_dict.get('loss_val_best', np.inf)
+
+ print_every = global_config['OPTIMIZER']['print_every'] \
+ if 'print_every' in global_config['OPTIMIZER'] else 0
+ checkpoint_every = global_config['OPTIMIZER']['checkpoint_every'] \
+ if 'checkpoint_every' in global_config['OPTIMIZER'] else 0
+ backup_every = global_config['OPTIMIZER']['backup_every'] \
+ if 'backup_every' in global_config['OPTIMIZER'] else 0
+ val_every = global_config['OPTIMIZER']['val_every'] \
+ if 'val_every' in global_config['OPTIMIZER'] else 0
+
+ for epoch_it in range(cur_epoch, global_config['OPTIMIZER']['max_epoch']):
+ log_string('**** EPOCH %03d ****' % epoch_it)
+ grasp_estimator.train()
+
+ # # Start when we left of in dataloader
+ # skip_amount = it % len(train_dataloader)
+
+ pbar = tqdm(train_dataloader)
+ for i, data in enumerate(pbar):
+ # if i < skip_amount:
+ # print('skipping')
+ # continue
+
+ utils.send_dict_to_device(data, device)
+ # Target contains input and target values
+ pc_cam = data['pc_cam']
+
+ pred = grasp_estimator(pc_cam)
+ loss, loss_info = loss_fn(pred, data)
+ opt.zero_grad()
+ loss.backward()
+ opt.step()
+
+ for k, v in loss_info.items():
+ logger.add_scalar(f'train/{k}', v, it)
+
+ # -- Logging -- #
+ # if print_every and it % print_every == 0:
+ # print('[Epoch %02d] it=%03d, loss=%.4f, adds_loss=%.4f'% (epoch_it, it, loss, loss_info['adds_loss']))
+
+ if checkpoint_every and it % checkpoint_every == 0:
+ checkpoint_io.save('model.pt', epoch_it=epoch_it, it=it,
+ loss_val_best=metric_val_best)
+
+ if backup_every and it % backup_every == 0:
+ checkpoint_io.save('model_%d.pt' % it, epoch_it=epoch_it, it=it,
+ loss_val_best=metric_val_best)
+
+ logger.add_scalar('train/loss', loss.item(), it)
+ pbar.set_postfix({'loss': loss.item(),
+ 'epoch': epoch_it})
+
+ it += 1
+
+ # -- Run Validation -- #
+ if val_every and epoch_it % val_every == 0:
+ grasp_estimator.eval()
+ with torch.no_grad():
+ loss_log = []
+ for val_it, data in enumerate(tqdm(test_dataloader)):
+ utils.send_dict_to_device(data, device)
+ # Target contains input and target values
+ pc_cam = data['pc_cam']
+
+ pred = grasp_estimator(pc_cam)
+ loss, loss_info = loss_fn(pred, data)
+ loss_log.append(loss.item())
+ val_loss = np.mean(loss_log)
+ logger.add_scalar('val/val_loss', val_loss, it)
+
+
+ if val_loss < metric_val_best:
+ metric_val_best = val_loss
+ checkpoint_io.save('model_best.pt', epoch_it=epoch_it, it=it,
+ loss_val_best=metric_val_best)
+
+
+if __name__ == "__main__":
+ # Usage:
+ # To continue training: python train_pytorch.py --ckpt_dir {Pcurrent_ckpt_dir}
+ # e.g. python3 train_pytorch.py --ckpt_dir ../checkpoints/contact_graspnet_2
+ # To start training from scratch: python train_pytorch.py
+
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--ckpt_dir', type=str, default=None, help='Checkpoint dir')
+ parser.add_argument('--data_path', type=str, default=None, help='Grasp data root dir')
+ parser.add_argument('--max_epoch', type=int, default=None, help='Epochs to run')
+ parser.add_argument('--batch_size', type=int, default=None, help='Batch Size during training')
+ parser.add_argument('--arg_configs', nargs="*", type=str, default=[], help='overwrite config parameters')
+ FLAGS = parser.parse_args()
+
+ ckpt_dir = FLAGS.ckpt_dir
+ if ckpt_dir is None:
+ # ckpt_dir is contact_graspnet_year_month_day_hour_minute_second
+ ckpt_dir = os.path.join(CONTACT_DIR, '../', f'checkpoints/contact_graspnet_{datetime.now().strftime("Y%YM%mD%d_H%HM%M")}')
+
+ data_path = FLAGS.data_path
+ if data_path is None:
+ data_path = os.path.join(CONTACT_DIR, '../', 'acronym/')
+
+ if not os.path.exists(ckpt_dir):
+ if not os.path.exists(os.path.dirname(ckpt_dir)):
+ ckpt_dir = os.path.join(BASE_DIR, ckpt_dir)
+ os.makedirs(ckpt_dir, exist_ok=True)
+
+ os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
+ os.system('cp {} {}'.format(os.path.join(CONTACT_DIR, 'contact_graspnet.py'), ckpt_dir)) # bkp of model def
+ os.system('cp {} {}'.format(os.path.join(CONTACT_DIR, 'train.py'), ckpt_dir)) # bkp of train procedure
+
+ LOG_FOUT = open(os.path.join(ckpt_dir, 'log_train.txt'), 'w')
+ LOG_FOUT.write(str(FLAGS)+'\n')
+ def log_string(out_str):
+ LOG_FOUT.write(out_str+'\n')
+ LOG_FOUT.flush()
+ print(out_str)
+
+ global_config = config_utils.load_config(ckpt_dir, batch_size=FLAGS.batch_size, max_epoch=FLAGS.max_epoch,
+ data_path= FLAGS.data_path, arg_configs=FLAGS.arg_configs, save=True)
+
+ log_string(str(global_config))
+ log_string('pid: %s'%(str(os.getpid())))
+
+ train(global_config, ckpt_dir)
+
+ LOG_FOUT.close()
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/utils.py b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/utils.py
new file mode 100644
index 0000000000..338023f8ad
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/utils.py
@@ -0,0 +1,399 @@
+import os
+import numpy as np
+import torch
+import cv2
+
+from PIL import Image
+from scipy.spatial import cKDTree
+
+def send_dict_to_device(data, device):
+ """
+ Send all data in dict to torch device
+ """
+ for k, v in data.items():
+ data[k] = v.to(device)
+ return None
+
+def index_points(points, idx):
+ """
+ From pointnet2 repo
+
+ Input:
+ points: input points data, [B, N, C]
+ idx: sample index data, [B, S]
+ Return:
+ new_points:, indexed points data, [B, S, C]
+ """
+ device = points.device
+ B = points.shape[0]
+ view_shape = list(idx.shape)
+ view_shape[1:] = [1] * (len(view_shape) - 1)
+ repeat_shape = list(idx.shape)
+ repeat_shape[0] = 1
+ batch_indices = torch.arange(B, dtype=torch.long).to(device).view(view_shape).repeat(repeat_shape)
+ new_points = points[batch_indices, idx, :]
+ return new_points
+
+def inverse_transform(trans):
+ """
+ Computes the inverse of 4x4 transform.
+
+ Arguments:
+ trans {np.ndarray} -- 4x4 transform.
+
+ Returns:
+ [np.ndarray] -- inverse 4x4 transform
+ """
+ rot = trans[:3, :3]
+ t = trans[:3, 3]
+ rot = np.transpose(rot)
+ t = -np.matmul(rot, t)
+ output = np.zeros((4, 4), dtype=np.float32)
+ output[3][3] = 1
+ output[:3, :3] = rot
+ output[:3, 3] = t
+
+ return output
+
+
+def build_6d_grasp(approach_dirs, base_dirs, contact_pts, thickness, use_torch=False, gripper_depth = 0.1034, device=None):
+ """
+ Build 6-DoF grasps + width from point-wise network predictions
+
+ Arguments:
+ approach_dirs {np.ndarray/torch.tensor} -- BxNx3 approach direction vectors
+ base_dirs {np.ndarray/torch.tensor} -- BxNx3 base direction vectors
+ contact_pts {np.ndarray/torch.tensor} -- BxNx3 contact points
+ thickness {np.ndarray/torch.tensor} -- BxNx1 grasp width
+
+ Keyword Arguments:
+ use_tf {bool} -- whether inputs and outputs are tf tensors (default: {False})
+ gripper_depth {float} -- distance from gripper coordinate frame to gripper baseline in m (default: {0.1034})
+
+ Returns:
+ np.ndarray / torch.tensor -- BxNx4x4 grasp poses in camera coordinates
+ """
+ # We are trying to build a stack of 4x4 homogeneous transform matricies of size B x N x 4 x 4.
+ # To do so, we calculate the rotation and translation portions according to the paper.
+ # This gives us positions as shown:
+ # [ R R R T ]
+ # [ R R R T ]
+ # [ R R R T ]
+ # [ 0 0 0 1 ] Note that the ^ dim is 2 and the --> dim is 3
+ # We need to pad with zeros and ones to get the final shape so we generate
+ # ones and zeros and stack them.
+ if thickness.ndim == 2:
+ thickness = thickness.unsqueeze(2) # B x N x 1
+ if use_torch:
+ if device is None:
+ device = torch.device('cpu')
+ grasp_R = torch.stack([base_dirs, torch.cross(approach_dirs,base_dirs),approach_dirs], dim=3) # B x N x 3 x 3
+ grasp_t = contact_pts + (thickness / 2) * base_dirs - gripper_depth * approach_dirs # B x N x 3
+ grasp_t = grasp_t.unsqueeze(3) # B x N x 3 x 1
+ ones = torch.ones((contact_pts.shape[0], contact_pts.shape[1], 1, 1), dtype=torch.float32).to(device) # B x N x 1 x 1
+ zeros = torch.zeros((contact_pts.shape[0], contact_pts.shape[1], 1, 3), dtype=torch.float32).to(device) # B x N x 1 x 3
+ homog_vec = torch.cat([zeros, ones], dim=3) # B x N x 1 x 4
+ grasps = torch.cat([torch.cat([grasp_R, grasp_t], dim=3), homog_vec], dim=2) # B x N x 4 x 4
+
+ else:
+ raise NotImplementedError("Need to test this more")
+ grasps = []
+ for i in range(len(contact_pts)):
+ grasp = np.eye(4)
+
+ grasp[:3,0] = base_dirs[i] / np.linalg.norm(base_dirs[i])
+ grasp[:3,2] = approach_dirs[i] / np.linalg.norm(approach_dirs[i])
+ grasp_y = np.cross( grasp[:3,2],grasp[:3,0])
+ grasp[:3,1] = grasp_y / np.linalg.norm(grasp_y)
+ # base_gripper xyz = contact + thickness / 2 * baseline_dir - gripper_d * approach_dir
+ grasp[:3,3] = contact_pts[i] + thickness[i] / 2 * grasp[:3,0] - gripper_depth * grasp[:3,2]
+ # grasp[0,3] = finger_width
+ grasps.append(grasp)
+ grasps = np.array(grasps)
+
+ return grasps
+
+def farthest_points(data, nclusters, dist_func, return_center_indexes=False, return_distances=False, verbose=False):
+ """
+ Performs farthest point sampling on data points.
+ Args:
+ data: numpy array of the data points.
+ nclusters: int, number of clusters.
+ dist_dunc: distance function that is used to compare two data points.
+ return_center_indexes: bool, If True, returns the indexes of the center of
+ clusters.
+ return_distances: bool, If True, return distances of each point from centers.
+
+ Returns clusters, [centers, distances]:
+ clusters: numpy array containing the cluster index for each element in
+ data.
+ centers: numpy array containing the integer index of each center.
+ distances: numpy array of [npoints] that contains the closest distance of
+ each point to any of the cluster centers.
+ """
+ if nclusters >= data.shape[0]:
+ if return_center_indexes:
+ return np.arange(data.shape[0], dtype=np.int32), np.arange(data.shape[0], dtype=np.int32)
+
+ return np.arange(data.shape[0], dtype=np.int32)
+
+ clusters = np.ones((data.shape[0],), dtype=np.int32) * -1
+ distances = np.ones((data.shape[0],), dtype=np.float32) * 1e7
+ centers = []
+ for iter in range(nclusters):
+ index = np.argmax(distances)
+ centers.append(index)
+ shape = list(data.shape)
+ for i in range(1, len(shape)):
+ shape[i] = 1
+
+ broadcasted_data = np.tile(np.expand_dims(data[index], 0), shape)
+ new_distances = dist_func(broadcasted_data, data)
+ distances = np.minimum(distances, new_distances)
+ clusters[distances == new_distances] = iter
+ if verbose:
+ print('farthest points max distance : {}'.format(np.max(distances)))
+
+ if return_center_indexes:
+ if return_distances:
+ return clusters, np.asarray(centers, dtype=np.int32), distances
+ return clusters, np.asarray(centers, dtype=np.int32)
+
+ return clusters
+
+
+
+def depth2pc(depth, K, rgb=None):
+ """
+ Currently not used
+
+ Convert depth and intrinsics to point cloud and optionally point cloud color
+ :param depth: hxw depth map in m
+ :param K: 3x3 Camera Matrix with intrinsics
+ :returns: (Nx3 point cloud, point cloud color)
+ """
+
+ mask = np.where(depth > 0)
+ x,y = mask[1], mask[0]
+
+ normalized_x = (x.astype(np.float32) - K[0,2])
+ normalized_y = (y.astype(np.float32) - K[1,2])
+
+ world_x = normalized_x * depth[y, x] / K[0,0]
+ world_y = normalized_y * depth[y, x] / K[1,1]
+ world_z = depth[y, x]
+
+ if rgb is not None:
+ rgb = rgb[y,x,:]
+
+ pc = np.vstack((world_x, world_y, world_z)).T
+ return (pc, rgb)
+
+def reject_median_outliers(data, m=-1.4, z_only=False):
+ """
+ Currently not used
+
+ Reject outliers with median absolute distance m
+
+ Arguments:
+ data {[np.ndarray]} -- Numpy array such as point cloud
+
+ Keyword Arguments:
+ m {[float]} -- Maximum absolute distance from median in m (default: {-1.4})
+ z_only {[bool]} -- filter only via z_component (default: {False})
+
+ Returns:
+ [np.ndarray] -- Filtered data without outliers
+ """
+ if z_only:
+ d = np.abs(data[:,1:3] - np.median(data[:,2:3]))
+ else:
+ d = np.abs(data - np.median(data, axis=-1, keepdims=True))
+
+ return data[np.sum(d, axis=0) < m]
+
+
+def distance_by_translation_point(p1, p2):
+ """
+ Gets two nx3 points and computes the distance between point p1 and p2.
+ """
+ return np.sqrt(np.sum(np.square(p1 - p2), axis=-1))
+
+
+def regularize_pc_point_count(pc, npoints, use_farthest_point=False):
+ """
+ If point cloud pc has less points than npoints, it oversamples.
+ Otherwise, it downsample the input pc to have npoint points.
+ use_farthest_point: indicates
+
+ :param pc: Nx3 point cloud
+ :param npoints: number of points the regularized point cloud should have
+ :param use_farthest_point: use farthest point sampling to downsample the points, runs slower.
+ :returns: npointsx3 regularized point cloud
+ """
+
+ if pc.shape[0] > npoints:
+ if use_farthest_point:
+ _, center_indexes = farthest_points(pc, npoints, distance_by_translation_point, return_center_indexes=True)
+ else:
+ center_indexes = np.random.choice(range(pc.shape[0]), size=npoints, replace=False)
+ pc = pc[center_indexes, :]
+ else:
+ required = npoints - pc.shape[0]
+ if required > 0:
+ index = np.random.choice(range(pc.shape[0]), size=required)
+ pc = np.concatenate((pc, pc[index, :]), axis=0)
+ return pc
+
+
+def load_available_input_data(p, K=None):
+ """
+ Load available data from input file path.
+
+ Numpy files .npz/.npy should have keys
+ 'depth' + 'K' + (optionally) 'segmap' + (optionally) 'rgb'
+ or for point clouds:
+ 'xyz' + (optionally) 'xyz_color'
+
+ png files with only depth data (in mm) can be also loaded.
+ If the image path is from the GraspNet dataset, corresponding rgb, segmap and intrinic are also loaded.
+
+ :param p: .png/.npz/.npy file path that contain depth/pointcloud and optionally intrinsics/segmentation/rgb
+ :param K: 3x3 Camera Matrix with intrinsics
+ :returns: All available data among segmap, rgb, depth, cam_K, pc_full, pc_colors
+ """
+
+ segmap, rgb, depth, pc_full, pc_colors = None, None, None, None, None
+
+ if K is not None:
+ if isinstance(K,str):
+ cam_K = eval(K)
+ cam_K = np.array(K).reshape(3,3)
+
+ if '.np' in p:
+ data = np.load(p, allow_pickle=True)
+ if '.npz' in p:
+ keys = data.files
+ else:
+ keys = []
+ if len(data.shape) == 0:
+ data = data.item()
+ keys = data.keys()
+ elif data.shape[-1] == 3:
+ pc_full = data
+ else:
+ depth = data
+
+ if 'depth' in keys:
+ depth = data['depth']
+ if K is None and 'K' in keys:
+ cam_K = data['K'].reshape(3,3)
+ if 'segmap' in keys:
+ segmap = data['segmap']
+ if 'seg' in keys:
+ segmap = data['seg']
+ if 'rgb' in keys:
+ rgb = data['rgb']
+ rgb = np.array(cv2.cvtColor(rgb, cv2.COLOR_BGR2RGB))
+ elif 'xyz' in keys:
+ pc_full = np.array(data['xyz']).reshape(-1,3)
+ if 'xyz_color' in keys:
+ pc_colors = data['xyz_color']
+ elif '.png' in p:
+ if os.path.exists(p.replace('depth', 'label')):
+ # graspnet data
+ depth, rgb, segmap, K = load_graspnet_data(p)
+ elif os.path.exists(p.replace('depths', 'images').replace('npy', 'png')):
+ rgb = np.array(Image.open(p.replace('depths', 'images').replace('npy', 'png')))
+ else:
+ depth = np.array(Image.open(p))
+ else:
+ raise ValueError('{} is neither png nor npz/npy file'.format(p))
+
+ return segmap, rgb, depth, cam_K, pc_full, pc_colors
+
+
+def load_graspnet_data(rgb_image_path):
+
+ """
+ Loads data from the GraspNet-1Billion dataset
+ # https://graspnet.net/
+
+ :param rgb_image_path: .png file path to depth image in graspnet dataset
+ :returns: (depth, rgb, segmap, K)
+ """
+
+ depth = np.array(Image.open(rgb_image_path))/1000. # m to mm
+ segmap = np.array(Image.open(rgb_image_path.replace('depth', 'label')))
+ rgb = np.array(Image.open(rgb_image_path.replace('depth', 'rgb')))
+
+ # graspnet images are upside down, rotate for inference
+ # careful: rotate grasp poses back for evaluation
+ depth = np.rot90(depth,2)
+ segmap = np.rot90(segmap,2)
+ rgb = np.rot90(rgb,2)
+
+ if 'kinect' in rgb_image_path:
+ # Kinect azure:
+ K=np.array([[631.54864502 , 0. , 638.43517329],
+ [ 0. , 631.20751953, 366.49904066],
+ [ 0. , 0. , 1. ]])
+ else:
+ # Realsense:
+ K=np.array([[616.36529541 , 0. , 310.25881958],
+ [ 0. , 616.20294189, 236.59980774],
+ [ 0. , 0. , 1. ]])
+
+ return depth, rgb, segmap, K
+
+
+
+def estimate_normals_cam_from_pc(self, pc_cam, max_radius=0.05, k=12):
+ """
+ NOT CURRENTLY USED
+
+ Estimates normals in camera coords from given point cloud.
+
+ Arguments:
+ pc_cam {np.ndarray} -- Nx3 point cloud in camera coordinates
+
+ Keyword Arguments:
+ max_radius {float} -- maximum radius for normal computation (default: {0.05})
+ k {int} -- Number of neighbors for normal computation (default: {12})
+
+ Returns:
+ [np.ndarray] -- Nx3 point cloud normals
+ """
+ tree = cKDTree(pc_cam, leafsize=pc_cam.shape[0]+1)
+ _, ndx = tree.query(pc_cam, k=k, distance_upper_bound=max_radius, n_jobs=-1) # num_points x k
+
+ for c,idcs in enumerate(ndx):
+ idcs[idcs==pc_cam.shape[0]] = c
+ ndx[c,:] = idcs
+ neighbors = np.array([pc_cam[ndx[:,n],:] for n in range(k)]).transpose((1,0,2))
+ pc_normals = vectorized_normal_computation(pc_cam, neighbors)
+ return pc_normals
+
+
+def vectorized_normal_computation(pc, neighbors):
+ """
+ Vectorized normal computation with numpy
+
+ Arguments:
+ pc {np.ndarray} -- Nx3 point cloud
+ neighbors {np.ndarray} -- Nxkx3 neigbours
+
+ Returns:
+ [np.ndarray] -- Nx3 normal directions
+ """
+ diffs = neighbors - np.expand_dims(pc, 1) # num_point x k x 3
+ covs = np.matmul(np.transpose(diffs, (0, 2, 1)), diffs) # num_point x 3 x 3
+ covs /= diffs.shape[1]**2
+ # takes most time: 6-7ms
+ eigen_values, eigen_vectors = np.linalg.eig(covs) # num_point x 3, num_point x 3 x 3
+ orders = np.argsort(-eigen_values, axis=1) # num_point x 3
+ orders_third = orders[:,2] # num_point
+ directions = eigen_vectors[np.arange(pc.shape[0]),:,orders_third] # num_point x 3
+ dots = np.sum(directions * pc, axis=1) # num_point
+ directions[dots >= 0] = -directions[dots >= 0]
+ return directions
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/visualization_utils_o3d.py b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/visualization_utils_o3d.py
new file mode 100644
index 0000000000..ecb6549200
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/visualization_utils_o3d.py
@@ -0,0 +1,313 @@
+import os
+import numpy as np
+import plotly.express as px
+import plotly.graph_objects as go
+
+import open3d as o3d
+
+import matplotlib.pyplot as plt
+from matplotlib import cm
+
+from scipy.spatial.transform import Rotation as R
+
+import mesh_utils
+
+
+# To fix GLIB open3d error:
+# https://askubuntu.com/questions/1393285/how-to-install-glibcxx-3-4-29-on-ubuntu-20-04
+# To fix xcb error, try uninstalling all qt things
+
+def plot_mesh(mesh, cam_trafo=np.eye(4), mesh_pose=np.eye(4)):
+ """
+ Plots mesh in mesh_pose from
+
+ Arguments:
+ mesh {trimesh.base.Trimesh} -- input mesh, e.g. gripper
+
+ Keyword Arguments:
+ cam_trafo {np.ndarray} -- 4x4 transformation from world to camera coords (default: {np.eye(4)})
+ mesh_pose {np.ndarray} -- 4x4 transformation from mesh to world coords (default: {np.eye(4)})
+ """
+
+ homog_mesh_vert = np.pad(mesh.vertices, (0, 1), 'constant', constant_values=(0, 1))
+ mesh_cam = homog_mesh_vert.dot(mesh_pose.T).dot(cam_trafo.T)[:,:3]
+ mlab.triangular_mesh(mesh_cam[:, 0],
+ mesh_cam[:, 1],
+ mesh_cam[:, 2],
+ mesh.faces,
+ colormap='Blues',
+ opacity=0.5)
+
+# def plot_coordinates(t,r, tube_radius=0.005):
+# """
+# plots coordinate frame
+
+# Arguments:
+# t {np.ndarray} -- translation vector
+# r {np.ndarray} -- rotation matrix
+
+# Keyword Arguments:
+# tube_radius {float} -- radius of the plotted tubes (default: {0.005})
+# """
+# mlab.plot3d([t[0],t[0]+0.2*r[0,0]], [t[1],t[1]+0.2*r[1,0]], [t[2],t[2]+0.2*r[2,0]], color=(1,0,0), tube_radius=tube_radius, opacity=1)
+# mlab.plot3d([t[0],t[0]+0.2*r[0,1]], [t[1],t[1]+0.2*r[1,1]], [t[2],t[2]+0.2*r[2,1]], color=(0,1,0), tube_radius=tube_radius, opacity=1)
+# mlab.plot3d([t[0],t[0]+0.2*r[0,2]], [t[1],t[1]+0.2*r[1,2]], [t[2],t[2]+0.2*r[2,2]], color=(0,0,1), tube_radius=tube_radius, opacity=1)
+
+
+def plot_coordinates(vis, t, r, tube_radius=0.005, central_color=None):
+ """
+ Plots coordinate frame
+
+ Arguments:
+ t {np.ndarray} -- translation vector
+ r {np.ndarray} -- rotation matrix
+
+ Keyword Arguments:
+ tube_radius {float} -- radius of the plotted tubes (default: {0.005})
+ """
+
+ # Create a line for each axis of the coordinate frame
+ lines = []
+ colors = [(1, 0, 0), (0, 1, 0), (0, 0, 1)] # Red, Green, Blue
+
+ if central_color is not None:
+ ball = o3d.geometry.TriangleMesh.create_sphere(radius=0.005)
+ ball.paint_uniform_color(np.array(central_color))
+ vis.add_geometry(ball)
+
+ for i in range(3):
+ line_points = [[t[0], t[1], t[2]],
+ [t[0] + 0.2 * r[0, i], t[1] + 0.2 * r[1, i], t[2] + 0.2 * r[2, i]]]
+
+ line = o3d.geometry.LineSet()
+ line.points = o3d.utility.Vector3dVector(line_points)
+ line.lines = o3d.utility.Vector2iVector(np.array([[0, 1]]))
+ line.colors = o3d.utility.Vector3dVector(np.array([colors[i]]))
+
+ line.paint_uniform_color(colors[i]) # Set line color
+ lines.append(line)
+
+ # Visualize the lines in the Open3D visualizer
+ for line in lines:
+ vis.add_geometry(line)
+
+def show_image(rgb, segmap):
+ """
+ Overlay rgb image with segmentation and imshow segment
+ Saves to debug_img directory
+
+ Arguments:
+ rgb {np.ndarray} -- color image
+ segmap {np.ndarray} -- integer segmap of same size as rgb
+ """
+ fig = plt.figure()
+
+ if rgb is not None:
+ plt.imshow(rgb)
+ if segmap is not None:
+ cmap = plt.get_cmap('rainbow')
+ cmap.set_under(alpha=0.0)
+ plt.imshow(segmap, cmap=cmap, alpha=0.5, vmin=0.0001)
+
+ base_path = './contact_graspnet_pytorch/testers/'
+ debug_path = os.path.join(base_path, 'debug_img')
+ if not os.path.exists(debug_path):
+ os.makedirs(debug_path)
+ imgs = os.listdir(debug_path)
+ i = 0
+ while f'debug_img_rgb{i}.png' in imgs:
+ i += 1
+ if i == 1000:
+ raise Exception('Could not save debug image')
+
+ plt.savefig(os.path.join(debug_path, f'debug_img_rgb{i}.png'))
+
+def visualize_grasps(full_pc, pred_grasps_cam, scores, plot_opencv_cam=False, pc_colors=None, gripper_openings=None, gripper_width=0.08,
+ T_world_cam=np.eye(4), plot_others=[]):
+ """Visualizes colored point cloud and predicted grasps. If given, colors grasps by segmap regions.
+ Thick grasp is most confident per segment. For scene point cloud predictions, colors grasps according to confidence.
+
+ Arguments:
+ full_pc {np.ndarray} -- Nx3 point cloud of the scene
+ pred_grasps_cam {dict[int:np.ndarray]} -- Predicted 4x4 grasp trafos per segment or for whole point cloud
+ scores {dict[int:np.ndarray]} -- Confidence scores for grasps
+
+ Keyword Arguments:
+ plot_opencv_cam {bool} -- plot camera coordinate frame (default: {False})
+ pc_colors {np.ndarray} -- Nx3 point cloud colors (default: {None})
+ gripper_openings {dict[int:np.ndarray]} -- Predicted grasp widths (default: {None})
+ gripper_width {float} -- If gripper_openings is None, plot grasp widths (default: {0.008})
+ """
+
+ print('Visualizing...')
+ pcd = o3d.geometry.PointCloud()
+ pcd.points = o3d.utility.Vector3dVector(full_pc)
+ pcd.colors = o3d.utility.Vector3dVector(pc_colors.astype(np.float64) / 255)
+
+ vis = o3d.visualization.Visualizer()
+ vis.create_window()
+ vis.add_geometry(pcd)
+
+ if plot_opencv_cam:
+ plot_coordinates(vis, np.zeros(3,),np.eye(3,3), central_color=(0.5, 0.5, 0.5))
+ # This is world in cam frame
+ T_cam_world = np.linalg.inv(T_world_cam) # We plot everything in the camera frame
+ t = T_cam_world[:3,3]
+ r = T_cam_world[:3,:3]
+ plot_coordinates(vis, t, r)
+
+ for t in plot_others:
+ plot_coordinates(vis, t[:3, 3], t[:3,:3])
+
+ cm = plt.get_cmap('rainbow')
+ cm2 = plt.get_cmap('viridis')
+
+ colors = [cm(1. * i/len(pred_grasps_cam))[:3] for i in range(len(pred_grasps_cam))]
+ colors2 = {k:cm2(0.5*np.max(scores[k]))[:3] for k in pred_grasps_cam if np.any(pred_grasps_cam[k])}
+
+ for i,k in enumerate(pred_grasps_cam):
+ if np.any(pred_grasps_cam[k]):
+ # Set gripper openings
+ if gripper_openings is None:
+ gripper_openings_k = np.ones(len(pred_grasps_cam[k]))*gripper_width
+ else:
+ gripper_openings_k = gripper_openings[k]
+
+ if len(pred_grasps_cam) > 1:
+ draw_grasps(vis, pred_grasps_cam[k], np.eye(4), colors=[colors[i]], gripper_openings=gripper_openings_k)
+ draw_grasps(vis, [pred_grasps_cam[k][np.argmax(scores[k])]], np.eye(4), colors=[colors2[k]],
+ gripper_openings=[gripper_openings_k[np.argmax(scores[k])]], tube_radius=0.0025)
+ else:
+ max_score = np.max(scores[k])
+ min_score = np.min(scores[k])
+
+ colors3 = [cm2((score - min_score) / (max_score - min_score))[:3] for score in scores[k]]
+ draw_grasps(vis, pred_grasps_cam[k], np.eye(4), colors=colors3, gripper_openings=gripper_openings_k)
+ best_grasp_idx = np.argmax(scores[k])
+ draw_grasps(vis, [pred_grasps_cam[k][best_grasp_idx]], np.eye(4), colors=[(1, 0, 0)], gripper_openings=gripper_openings_k)
+
+ vis.run()
+ vis.destroy_window()
+ return
+
+
+def draw_pc_with_colors(pc, pc_colors=None, single_color=(0.3,0.3,0.3), mode='2dsquare', scale_factor=0.0018):
+ """
+ Draws colored point clouds
+
+ Arguments:
+ pc {np.ndarray} -- Nx3 point cloud
+ pc_colors {np.ndarray} -- Nx3 point cloud colors
+
+ Keyword Arguments:
+ single_color {tuple} -- single color for point cloud (default: {(0.3,0.3,0.3)})
+ mode {str} -- primitive type to plot (default: {'point'})
+ scale_factor {float} -- Scale of primitives. Does not work for points. (default: {0.002})
+
+ """
+
+ if pc_colors is None:
+ mlab.points3d(pc[:, 0], pc[:, 1], pc[:, 2], color=single_color, scale_factor=scale_factor, mode=mode)
+ else:
+ #create direct grid as 256**3 x 4 array
+ def create_8bit_rgb_lut():
+ xl = np.mgrid[0:256, 0:256, 0:256]
+ lut = np.vstack((xl[0].reshape(1, 256**3),
+ xl[1].reshape(1, 256**3),
+ xl[2].reshape(1, 256**3),
+ 255 * np.ones((1, 256**3)))).T
+ return lut.astype('int32')
+
+ scalars = pc_colors[:,0]*256**2 + pc_colors[:,1]*256 + pc_colors[:,2]
+ rgb_lut = create_8bit_rgb_lut()
+ points_mlab = mlab.points3d(pc[:, 0], pc[:, 1], pc[:, 2], scalars, mode=mode, scale_factor=.0018)
+ points_mlab.glyph.scale_mode = 'scale_by_vector'
+ points_mlab.module_manager.scalar_lut_manager.lut._vtk_obj.SetTableRange(0, rgb_lut.shape[0])
+ points_mlab.module_manager.scalar_lut_manager.lut.number_of_colors = rgb_lut.shape[0]
+ points_mlab.module_manager.scalar_lut_manager.lut.table = rgb_lut
+
+def draw_grasps(vis, grasps, cam_pose, gripper_openings, colors=[(0, 1., 0)], show_gripper_mesh=False, tube_radius=0.0008):
+ """
+ Draws wireframe grasps from given camera pose and with given gripper openings
+
+ Arguments:
+ grasps {np.ndarray} -- Nx4x4 grasp pose transformations
+ cam_pose {np.ndarray} -- 4x4 camera pose transformation
+ gripper_openings {np.ndarray} -- Nx1 gripper openings
+
+ Keyword Arguments:
+ color {tuple} -- color of all grasps (default: {(0,1.,0)})
+ colors {np.ndarray} -- Nx3 color of each grasp (default: {None})
+ tube_radius {float} -- Radius of the grasp wireframes (default: {0.0008})
+ show_gripper_mesh {bool} -- Renders the gripper mesh for one of the grasp poses (default: {False})
+ """
+
+ gripper = mesh_utils.create_gripper('panda')
+ gripper_control_points = gripper.get_control_point_tensor(1, False, convex_hull=False).squeeze()
+ mid_point = 0.5*(gripper_control_points[1, :] + gripper_control_points[2, :])
+ grasp_line_plot = np.array([np.zeros((3,)), mid_point, gripper_control_points[1], gripper_control_points[3],
+ gripper_control_points[1], gripper_control_points[2], gripper_control_points[4]])
+
+ if show_gripper_mesh and len(grasps) > 0:
+ plot_mesh(gripper.hand, cam_pose, grasps[0])
+
+ all_pts = []
+ connections = []
+ index = 0
+ N = 7
+ color_arr = []
+ for i,(g,g_opening) in enumerate(zip(grasps, gripper_openings)):
+ gripper_control_points_closed = grasp_line_plot.copy()
+ gripper_control_points_closed[2:,0] = np.sign(grasp_line_plot[2:,0]) * g_opening/2
+
+ pts = np.matmul(gripper_control_points_closed, g[:3, :3].T)
+ pts += np.expand_dims(g[:3, 3], 0)
+ pts_homog = np.concatenate((pts, np.ones((7, 1))),axis=1)
+ pts = np.dot(pts_homog, cam_pose.T)[:,:3]
+
+ # color = color if colors is None else colors[i]
+ # colors.append(color)
+
+ all_pts.append(pts)
+ connections.append(np.vstack([np.arange(index, index + N - 1.5),
+ np.arange(index + 1, index + N - .5)]).T)
+ index += N
+ # mlab.plot3d(pts[:, 0], pts[:, 1], pts[:, 2], color=color, tube_radius=tube_radius, opacity=1.0)
+
+ # speeds up plot3d because only one vtk object
+ all_pts = np.vstack(all_pts)
+ connections = np.vstack(connections)
+
+ line_set = o3d.geometry.LineSet()
+ line_set.points = o3d.utility.Vector3dVector(all_pts)
+ line_set.lines = o3d.utility.Vector2iVector(connections)
+ # colors = np.array(colors).astype(np.float64)
+
+ if len(colors) == 1:
+ colors = np.vstack(colors).astype(np.float64)
+ colors = np.repeat(colors, len(grasps), axis=0)
+ elif len(colors) == len(grasps):
+ colors = np.vstack(colors).astype(np.float64)
+ else:
+ raise ValueError('Number of colors must be 1 or equal to number of grasps')
+ colors = np.repeat(colors, N - 1, axis=0)
+ line_set.colors = o3d.utility.Vector3dVector(colors)
+ # line_set.paint_uniform_color(color) # Set line color
+
+ # mat = o3d.visualization.rendering.MaterialRecord()
+ # mat.shader = "unlitLine"
+ # mat.line_width = 10
+ vis.add_geometry(line_set)
+ # vis.draw({
+ # 'name': 'grasps',
+ # 'geometry': line_set,
+ # 'material': mat
+ # })
+
+
+ # src = mlab.pipeline.scalar_scatter(all_pts[:,0], all_pts[:,1], all_pts[:,2])
+ # src.mlab_source.dataset.lines = connections
+ # src.update()
+ # lines =mlab.pipeline.tube(src, tube_radius=tube_radius, tube_sides=12)
+ # mlab.pipeline.surface(lines, color=color, opacity=1.0)
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/visualize_saved_scene.py b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/visualize_saved_scene.py
new file mode 100644
index 0000000000..ea4de97e8b
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/contact_graspnet_pytorch/visualize_saved_scene.py
@@ -0,0 +1,11 @@
+import open3d as o3d
+import numpy as np
+from contact_graspnet_pytorch.visualization_utils_o3d import visualize_grasps, show_image
+
+data = np.load('results/predictions_7.npz', allow_pickle=True)
+pred_grasps_cam = data['pred_grasps_cam'].item()
+scores = data['scores'].item()
+pc_full = data['pc_full']
+pc_colors = data['pc_colors']
+
+visualize_grasps(pc_full, pred_grasps_cam, scores, plot_opencv_cam=True, pc_colors=pc_colors)
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/docs/acronym_setup.md b/dimos/models/manipulation/contact_graspnet_pytorch/docs/acronym_setup.md
new file mode 100644
index 0000000000..8f3b7d27d4
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/docs/acronym_setup.md
@@ -0,0 +1,115 @@
+# Set up Acronym Dataset
+
+Instructions are based off of https://github.com/NVlabs/acronym#using-the-full-acronym-dataset.
+
+## Download Data
+
+1. Download the acronym dataset and extract it to the `acronym/grasps` directory: [acronym.tar.gz](https://drive.google.com/file/d/1OjykLD9YmnFdfYpH2qO8yBo-I-22vKwu/view?usp=sharing)
+
+2. Download the ShapeNetSem meshes (models-OBJ.zip) from [https://www.shapenet.org/](https://www.shapenet.org/) and
+extract it to `acronym/models`.
+
+The directory structure should look like this:
+
+```
+acronym
+├── grasps
+│ ├── *.h5
+├── models
+│ ├── *.obj
+| ├── *.mtl
+```
+
+## Get Manifold Waterproofing Library
+In order to run physics simulation, we must waterproof the meshes.
+
+Clone and build Manifold in the `acronym` directory. This may take a while:
+
+```
+cd acronym
+git clone --recursive git@github.com:hjwdzh/Manifold.git
+cd Manifold
+mkdir build
+cd build
+cmake .. -DCMAKE_BUILD_TYPE=Release
+make
+```
+Note: We use a different `git clone` command than the Manifold repo. The original repo's command
+hangs.
+
+Additional information can be found at https://github.com/hjwdzh/Manifold.
+
+
+The directory structure should look like this:
+```
+acronym
+├── grasps
+│ ├── *.h5
+├── models
+│ ├── *.obj
+| ├── *.mtl
+├── Manifold
+```
+
+## Waterproof the Meshes
+
+Run the waterproofing script:
+
+```
+python3 tools/waterproof_meshes.py
+```
+This will waterproof the meshes in `acronym/models` and save them to
+`acronym/meshes`. Additionally, a log file `acronym/failed.txt` will
+document any meshes that failed to waterproof.
+
+This may take a while. After this is done, it is safe to
+remove the `acronym/models` directory.
+
+### Debugging
+
+If a large number of files appear to be missing, try re-downloading the
+ShapeNetSem meshes.
+
+Additionally, if your computer crashes, try reducing the number of cpu cores
+used in `waterproof_meshes.py`.
+
+## Refactory Mesh Directory
+A number of the scripts expect the mesh directory to include object categories
+in the file path.
+
+e.g.
+
+```
+acronym
+├── meshes
+│ ├── airplane
+│ │ ├── id_1.obj
+│ │ ├── id_2.obj
+```
+
+Run the following script to restructure the mesh directory:
+
+```
+python3 tools/refactor_mesh_dir.py
+```
+
+## Download Scene Data
+Download `scene_contacts.zip` and `splits.zip` from [here](https://drive.google.com/drive/folders/1eeEXAISPaStZyjMX8BHR08cdQY4HF4s0?usp=sharing) and extract both to the `acronym` folder:
+
+The directory structure should look like this (`models` and `Manifold` are
+optional):
+
+```
+acronym
+├── grasps
+├── models (optional)
+├── Manifold (optional)
+├── meshes
+├── scenes_contacts
+├── splits
+```
+
+## Final Notes
+At this point, the `acronym` dataset should be ready for use.
+Additional instructions are also available for generating new scenes. (see `README.md`)
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/docs/generate_scenes.md b/dimos/models/manipulation/contact_graspnet_pytorch/docs/generate_scenes.md
new file mode 100644
index 0000000000..9494b5e4f5
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/docs/generate_scenes.md
@@ -0,0 +1,23 @@
+# Generate New Scenes
+
+## Introduction
+This is an optional procedure to generate custom scenes using the grasps
+from the acronym dataset. Instructions are based off the original instructions
+in the Contact-GraspNet repo
+
+The scene_contacts downloaded above are generated from the Acronym dataset. To generate/visualize table-top scenes yourself, also pip install the [acronym_tools](https://github.com/NVlabs/acronym) package in your conda environment as described in the acronym repository.
+
+## Generate Contact Points
+
+
+DOES NOT WORK YET
+
+First, object-wise 6-DoF grasps are mapped to their contact points saved in mesh_contacts.
+These will be used when we train the network.
+
+This assumes your data is in the `acronym/` directory. If you have a different
+data directory, change the path accordingly.
+
+```
+python3 tools/create_contact_infos.py acronym/
+```
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/examples/10.png b/dimos/models/manipulation/contact_graspnet_pytorch/examples/10.png
new file mode 100644
index 0000000000..ef1e3f4c6e
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/examples/10.png differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/examples/10_segmap.png b/dimos/models/manipulation/contact_graspnet_pytorch/examples/10_segmap.png
new file mode 100644
index 0000000000..558c0b521f
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/examples/10_segmap.png differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/examples/2.gif b/dimos/models/manipulation/contact_graspnet_pytorch/examples/2.gif
new file mode 100644
index 0000000000..a060f02520
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/examples/2.gif differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/examples/2.png b/dimos/models/manipulation/contact_graspnet_pytorch/examples/2.png
new file mode 100644
index 0000000000..50389595ac
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/examples/2.png differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/examples/2_segmap.png b/dimos/models/manipulation/contact_graspnet_pytorch/examples/2_segmap.png
new file mode 100644
index 0000000000..c745f51015
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/examples/2_segmap.png differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/examples/7.gif b/dimos/models/manipulation/contact_graspnet_pytorch/examples/7.gif
new file mode 100644
index 0000000000..c95493c16b
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/examples/7.gif differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/examples/7.png b/dimos/models/manipulation/contact_graspnet_pytorch/examples/7.png
new file mode 100644
index 0000000000..01bf38dd57
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/examples/7.png differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/examples/7_segmap.png b/dimos/models/manipulation/contact_graspnet_pytorch/examples/7_segmap.png
new file mode 100644
index 0000000000..de5431fd50
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/examples/7_segmap.png differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/examples/graspnet_153_kinectazure_0002.png b/dimos/models/manipulation/contact_graspnet_pytorch/examples/graspnet_153_kinectazure_0002.png
new file mode 100644
index 0000000000..8147ebbf16
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/examples/graspnet_153_kinectazure_0002.png differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/examples/realsense_crop_sigma_001.png b/dimos/models/manipulation/contact_graspnet_pytorch/examples/realsense_crop_sigma_001.png
new file mode 100644
index 0000000000..5c72aaad07
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/examples/realsense_crop_sigma_001.png differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/examples/synthetic_test.png b/dimos/models/manipulation/contact_graspnet_pytorch/examples/synthetic_test.png
new file mode 100644
index 0000000000..1b7d94903f
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/examples/synthetic_test.png differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/gripper_control_points/panda.npy b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_control_points/panda.npy
new file mode 100644
index 0000000000..91712dd3ce
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_control_points/panda.npy differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/gripper_control_points/panda_gripper_coords.pickle b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_control_points/panda_gripper_coords.pickle
new file mode 100644
index 0000000000..a7e38d75e2
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_control_points/panda_gripper_coords.pickle
@@ -0,0 +1,164 @@
+(dp0
+S'gripper_left_tip_flat'
+p1
+cnumpy.core.multiarray
+_reconstruct
+p2
+(cnumpy
+ndarray
+p3
+(I0
+tp4
+S'b'
+p5
+tp6
+Rp7
+(I1
+(I3
+tp8
+cnumpy
+dtype
+p9
+(S'f8'
+p10
+I0
+I1
+tp11
+Rp12
+(I3
+S'<'
+p13
+NNNI-1
+I-1
+I0
+tp14
+bI00
+S'Y\x868\xd6\xc5m\xa4?\x00\x00\x00\x00\x00\x00\x00\x00r\xfaz\xbef\xb9\xbc?'
+p15
+tp16
+bsS'gripper_right_tip_flat'
+p17
+g2
+(g3
+(I0
+tp18
+g5
+tp19
+Rp20
+(I1
+(I3
+tp21
+g12
+I00
+S'Y\x868\xd6\xc5m\xa4\xbf\x00\x00\x00\x00\x00\x00\x00\x00r\xfaz\xbef\xb9\xbc?'
+p22
+tp23
+bsS'gripper_left_tip_indent'
+p24
+g2
+(g3
+(I0
+tp25
+g5
+tp26
+Rp27
+(I1
+(I3
+tp28
+g12
+I00
+S'\xd0\x98I\xd4\x0b>\xa5?\x00\x00\x00\x00\x00\x00\x00\x00r\xfaz\xbef\xb9\xbc?'
+p29
+tp30
+bsS'gripper_right_center_indent'
+p31
+g2
+(g3
+(I0
+tp32
+g5
+tp33
+Rp34
+(I1
+(I3
+tp35
+g12
+I00
+S'\xd0\x98I\xd4\x0b>\xa5\xbf\x00\x00\x00\x00\x00\x00\x00\x00\xd5\th"lx\xba?'
+p36
+tp37
+bsS'l_y'
+p38
+F0.01754
+sS'gripper_left_center_flat'
+p39
+g2
+(g3
+(I0
+tp40
+g5
+tp41
+Rp42
+(I1
+(I3
+tp43
+g12
+I00
+S'Y\x868\xd6\xc5m\xa4?\x00\x00\x00\x00\x00\x00\x00\x00\xd5\th"lx\xba?'
+p44
+tp45
+bsS'l_z'
+p46
+F0.01754
+sS'gripper_left_center_indent'
+p47
+g2
+(g3
+(I0
+tp48
+g5
+tp49
+Rp50
+(I1
+(I3
+tp51
+g12
+I00
+S'\xd0\x98I\xd4\x0b>\xa5?\x00\x00\x00\x00\x00\x00\x00\x00\xd5\th"lx\xba?'
+p52
+tp53
+bsS'gripper_right_center_flat'
+p54
+g2
+(g3
+(I0
+tp55
+g5
+tp56
+Rp57
+(I1
+(I3
+tp58
+g12
+I00
+S'Y\x868\xd6\xc5m\xa4\xbf\x00\x00\x00\x00\x00\x00\x00\x00\xd5\th"lx\xba?'
+p59
+tp60
+bsS'gripper_right_tip_indent'
+p61
+g2
+(g3
+(I0
+tp62
+g5
+tp63
+Rp64
+(I1
+(I3
+tp65
+g12
+I00
+S'\xd0\x98I\xd4\x0b>\xa5\xbf\x00\x00\x00\x00\x00\x00\x00\x00r\xfaz\xbef\xb9\xbc?'
+p66
+tp67
+bs.
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/gripper_control_points/panda_gripper_coords.yml b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_control_points/panda_gripper_coords.yml
new file mode 100644
index 0000000000..343ab7731b
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_control_points/panda_gripper_coords.yml
@@ -0,0 +1,10 @@
+gripper_left_center_indent: [0.041489, 0, 0.1034]
+gripper_left_center_flat: [0.0399, 0, 0.1034]
+gripper_right_center_indent: [-0.041489, 0, 0.1034]
+gripper_right_center_flat: [-0.0399, 0, 0.1034]
+gripper_left_tip_indent: [0.041489, 0, 0.112204]
+gripper_left_tip_flat: [0.0399, 0, 0.112204]
+gripper_right_tip_indent: [-0.041489, 0, 0.112204]
+gripper_right_tip_flat: [-0.0399, 0, 0.112204]
+l_z: 0.01754
+l_y: 0.01754
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/featuretype.STL b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/featuretype.STL
new file mode 100644
index 0000000000..c8848936b8
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/featuretype.STL differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/panda_gripper/finger.stl b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/panda_gripper/finger.stl
new file mode 100644
index 0000000000..311b2780e8
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/panda_gripper/finger.stl differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/panda_gripper/hand.stl b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/panda_gripper/hand.stl
new file mode 100644
index 0000000000..de404b5a93
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/panda_gripper/hand.stl differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/panda_gripper/panda_gripper.obj b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/panda_gripper/panda_gripper.obj
new file mode 100644
index 0000000000..b1f1e54920
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/panda_gripper/panda_gripper.obj
@@ -0,0 +1,402 @@
+v 0.05421821303665638 0.007778378669172524 0.11076592143774033
+v 0.039979396890921635 -0.00613801833242178 0.11197762053608895
+v 0.042534183906391264 0.010387184098362923 0.0585316962443525
+v 0.054274293556809426 -0.00583982700482011 0.11220345135927201
+v 0.06640338242053986 -0.01035996433347464 0.05855462865121663
+v 0.03997647545023938 0.008730395697057247 0.09456484627127648
+v 0.03994319871620974 0.008622935973107815 0.10935282196998597
+v 0.05610156148672104 0.010479452088475226 0.07738816600441933
+v 0.0652202693372965 0.010388848371803757 0.07758761990964413
+v 0.06525340951979161 -0.01040047220885754 0.07743658919036389
+v 0.05391758117824793 0.008841173723340033 0.10898935657143594
+v 0.05510264165699482 -0.010494819842278959 0.07683558996915818
+v 0.03990887965395814 0.005481416825205088 0.11212470989823342
+v 0.053988714665174485 -0.008616076782345774 0.1097279139995575
+v 0.05429311186075211 0.005277918651700018 0.11224903401136399
+v 0.039867356615141035 -0.00869277399033308 0.10856622692346574
+v 0.06630941011011601 0.010401263833045956 0.058566509751579725
+v 0.04258330418728292 -0.010448622517287731 0.05854680107415188
+v -0.05421821303665638 -0.007778378669172525 0.11076592143774033
+v -0.039979396890921635 0.00613801833242178 0.11197762053608895
+v -0.042534183906391264 -0.010387184098362923 0.0585316962443525
+v -0.054274293556809426 0.005839827004820108 0.11220345135927201
+v -0.06640338242053986 0.010359964333474636 0.05855462865121663
+v -0.03997647545023938 -0.008730395697057247 0.09456484627127648
+v -0.03994319871620974 -0.008622935973107815 0.10935282196998597
+v -0.05610156148672104 -0.010479452088475227 0.07738816600441933
+v -0.0652202693372965 -0.01038884837180376 0.07758761990964413
+v -0.06525340951979161 0.010400472208857536 0.07743658919036389
+v -0.05391758117824793 -0.008841173723340034 0.10898935657143594
+v -0.05510264165699482 0.010494819842278957 0.07683558996915818
+v -0.03990887965395814 -0.005481416825205088 0.11212470989823342
+v -0.053988714665174485 0.008616076782345772 0.1097279139995575
+v -0.05429311186075211 -0.00527791865170002 0.11224903401136399
+v -0.039867356615141035 0.00869277399033308 0.10856622692346574
+v -0.06630941011011601 -0.01040126383304596 0.058566509751579725
+v -0.04258330418728292 0.010448622517287731 0.05854680107415188
+v -0.04178430512547493 -0.028010861948132515 -0.0063964021392166615
+v -0.09769713878631592 -0.019066786393523216 0.024627480655908585
+v -0.09701105952262878 0.01980205439031124 0.020913975313305855
+v -0.09099503606557846 -0.00017339896294288337 0.0005767836119048297
+v 0.0005787304835394025 -0.031635917723178864 0.005973074585199356
+v -0.09544170647859573 -0.021822135895490646 0.021756364032626152
+v -0.10028521716594696 0.016218392178416252 0.04586976766586304
+v 0.1000911220908165 -0.014017123728990555 0.056026946753263474
+v -0.09460194408893585 0.018555866554379463 0.056617286056280136
+v -0.09433312714099884 0.021342597901821136 0.008942007087171078
+v 0.08797474205493927 -0.0006270006415434182 -0.024961603805422783
+v -0.09127645939588547 -0.014421950094401836 0.06568973511457443
+v -0.09320925921201706 -0.01604551635682583 0.06361687183380127
+v -0.10042587667703629 -0.004494380671530962 0.058119092136621475
+v 0.09249575436115265 -0.012987935915589333 0.06571194529533386
+v -0.09116631746292114 -0.023555776104331017 0.004607920069247484
+v 0.09070632606744766 -0.015471714548766613 0.06519009917974472
+v -0.08543447405099869 -0.022732771933078766 -0.004120331723242998
+v -0.09289686381816864 -0.023026082664728165 0.009876862168312073
+v 0.09790761768817902 -0.01751038245856762 0.05166616290807724
+v 0.0005585200269706547 0.03158979117870331 0.006214227061718702
+v 0.10119467973709106 0.003602118231356144 -0.012627636082470417
+v 0.09665588289499283 0.0004695942625403404 0.06307835876941681
+v 0.09384757280349731 0.017607156187295914 0.058448486030101776
+v -0.04204234480857849 0.02801508456468582 -0.006349603645503521
+v 0.09945143014192581 3.1802206649445e-05 -0.017161205410957336
+v 0.04551994055509567 0.0019174328772351146 -0.025659434497356415
+v -0.09025220572948456 -0.01676723174750805 0.06433220207691193
+v 0.10030148178339005 0.001190581009723246 0.05862313136458397
+v -0.07437754422426224 0.024816755205392838 -0.007155700121074915
+v 0.10259784758090973 -0.0034295637160539627 -0.00896429643034935
+v 0.1027156412601471 0.003494761884212494 -0.008029340766370296
+v -0.08589234948158264 -0.02449524775147438 -0.0018327292054891586
+v 0.09406288713216782 0.0131387272849679 0.06468318402767181
+v -0.08206511288881302 -0.025270354002714157 0.0051970407366752625
+v -0.09466791152954102 -0.02065763622522354 0.009537984617054462
+v -0.08824997395277023 0.022314293310046196 -0.0019331998191773891
+v -0.09747105836868286 -0.0016167220892384648 0.06230733543634415
+v 0.09552478045225143 -0.017053674906492233 0.02212120220065117
+v -0.08335519582033157 0.022376984357833862 -0.005526112858206034
+v -0.09936285763978958 -0.016994841396808624 0.05411478504538536
+v 0.0968022570014 0.017033156007528305 0.030322037637233734
+v 0.09160291403532028 0.01695552095770836 0.005562833044677973
+v -0.09012892097234726 0.016734914854168892 0.06443186104297638
+v 0.09177957475185394 0.01571837067604065 0.06491253525018692
+v 0.0840023085474968 0.0018427835311740637 -0.02552490122616291
+v 0.08797289431095123 0.0030759950168430805 -0.02397872507572174
+v 0.09962863475084305 -0.016013137996196747 0.04930143058300018
+v -0.09201552718877792 -0.01844867318868637 0.058393169194459915
+v -0.08025997132062912 -0.0008337647304870188 -0.007321717217564583
+v -0.07576971501111984 -0.025980981066823006 -0.005082232877612114
+v -0.10019978880882263 0.015550931915640831 0.05467259883880615
+v 0.09941626340150833 0.015804897993803024 0.05497027188539505
+v 0.10374269634485245 -1.7281157852266915e-05 -0.00919930450618267
+v 0.08254846930503845 -0.0008678357116878033 -0.025870200246572495
+v 0.0875329002737999 -0.016812244430184364 -0.0012064524926245213
+v 0.08223627507686615 -0.016532698646187782 -0.005118109285831451
+v -0.09373555332422256 0.022707808762788773 0.014340022578835487
+v -0.09371249377727509 -0.012566703371703625 0.06516847014427185
+v -0.07666800171136856 -0.024650242179632187 -0.0069321258924901485
+v 0.08927122503519058 0.01713424362242222 0.0636143907904625
+v 0.08776598423719406 -0.0032150978222489357 -0.023884393274784088
+v -0.09726057201623917 -0.019229214638471603 0.05058842897415161
+v -0.09369184076786041 0.020670883357524872 0.04330829158425331
+v 0.09740705043077469 0.017585095018148422 0.051984645426273346
+v 0.09855398535728455 -0.01663215272128582 0.05473393574357033
+v 0.09344169497489929 -0.014617033302783966 0.06450004875659943
+v 0.08296618610620499 0.00381033169105649 -0.024449335411190987
+v -0.09092690050601959 -0.021324951201677322 0.0009798021055758
+v -0.09280849248170853 -0.0001125619382946752 0.06596215069293976
+v -0.0917946845293045 0.021482910960912704 0.0026841284707188606
+v 0.09998264163732529 -0.009323876351118088 0.058489199727773666
+v 0.08358591049909592 -0.0036368216387927532 -0.024606257677078247
+v 0.1001875177025795 0.012505676597356796 0.056894149631261826
+v -0.09290558844804764 0.015396904200315475 0.06455627083778381
+v 0.0851321741938591 0.016558213159441948 -0.0038727361243218184
+v 0.09294531494379044 -0.0005056463996879756 0.06595310568809509
+v 0.10115781426429749 -0.0036167786456644535 -0.012610324658453465
+v -0.07790137827396393 0.02295910380780697 -0.007399038877338171
+v -0.0857401043176651 0.024729391559958458 -0.0012316935462877154
+v -0.10016821324825287 -0.014623090624809265 0.055734917521476746
+v -0.09951794147491455 -0.018192630261182785 0.043814171105623245
+v 0.09070031344890594 -0.017254667356610298 0.0630820095539093
+v 0.0919061228632927 -0.016804175451397896 0.006295484956353903
+v 0.09953752160072327 0.016230100765824318 0.051584091037511826
+v -0.08118050545454025 0.025447947904467583 0.0035006047692149878
+v -0.09906721860170364 0.017129460349678993 0.05430515855550766
+v -0.08656162023544312 -0.00033731618896126747 -0.004163281060755253
+v -0.09461534768342972 -0.00031412430689670146 0.007574658375233412
+v -0.07529757916927338 0.026034310460090637 -0.005030847620218992
+v -0.08017436414957047 -0.02276112325489521 -0.006909539457410574
+v 0.0018608596874400973 0.03161578252911568 0.0011797059560194612
+v 0.0458698496222496 -0.0015001518186181784 -0.02592480182647705
+v -0.0817025899887085 0.024515172466635704 -0.005051423329859972
+v -0.10003473609685898 0.009941554628312588 0.05834079533815384
+v -0.09267213940620422 0.013539588078856468 0.0656878799200058
+v -0.09849356859922409 0.019268833100795746 0.0449417382478714
+v -0.09040140360593796 0.023869164288043976 0.004368236754089594
+v 0.0019865017384290695 -0.031597502529621124 0.001152931246906519
+v -0.09849606454372406 -1.593970591784455e-05 0.027081793174147606
+v 0.10398972034454346 -4.109224391868338e-05 0.005690876394510269
+v 0.09192700684070587 0.01342480443418026 0.06573130935430527
+f 5 18 3
+f 3 8 17
+f 17 8 9
+f 18 6 3
+f 10 9 4
+f 3 17 5
+f 14 12 10
+f 12 16 18
+f 14 16 12
+f 7 3 6
+f 18 16 6
+f 16 13 6
+f 10 12 18
+f 10 18 5
+f 10 5 9
+f 9 15 4
+f 1 15 9
+f 7 13 1
+f 3 7 8
+f 10 4 14
+f 5 17 9
+f 8 11 9
+f 11 1 9
+f 4 13 2
+f 15 13 4
+f 16 2 13
+f 2 16 14
+f 13 7 6
+f 1 13 15
+f 7 11 8
+f 2 14 4
+f 1 11 7
+f 23 36 21
+f 21 26 35
+f 35 26 27
+f 36 24 21
+f 28 27 22
+f 21 35 23
+f 32 30 28
+f 30 34 36
+f 32 34 30
+f 25 21 24
+f 36 34 24
+f 34 31 24
+f 28 30 36
+f 28 36 23
+f 28 23 27
+f 27 33 22
+f 19 33 27
+f 25 31 19
+f 21 25 26
+f 28 22 32
+f 23 35 27
+f 26 29 27
+f 29 19 27
+f 22 31 20
+f 33 31 22
+f 34 20 31
+f 20 34 32
+f 31 25 24
+f 19 31 33
+f 25 29 26
+f 20 32 22
+f 19 29 25
+f 80 97 57
+f 75 56 135
+f 87 135 41
+f 78 128 101
+f 128 57 101
+f 45 80 57
+f 120 135 92
+f 41 135 56
+f 75 135 120
+f 121 137 90
+f 114 67 120
+f 71 87 41
+f 37 135 87
+f 106 95 48
+f 119 53 64
+f 79 128 78
+f 97 60 57
+f 60 101 57
+f 110 137 121
+f 88 133 43
+f 63 61 104
+f 100 57 122
+f 135 109 93
+f 119 41 56
+f 44 84 137
+f 114 92 98
+f 106 48 51
+f 51 113 106
+f 132 106 113
+f 113 138 132
+f 71 41 99
+f 78 101 121
+f 104 128 112
+f 68 79 78
+f 128 104 61
+f 45 100 133
+f 100 45 57
+f 100 122 134
+f 100 94 133
+f 94 100 134
+f 128 61 126
+f 128 126 122
+f 128 122 57
+f 66 61 63
+f 129 86 115
+f 86 129 127
+f 115 66 63
+f 109 135 37
+f 62 114 98
+f 93 109 98
+f 92 93 98
+f 92 135 93
+f 102 119 56
+f 62 58 90
+f 84 102 56
+f 137 84 90
+f 74 106 132
+f 106 74 95
+f 80 132 81
+f 138 81 132
+f 119 85 41
+f 85 119 64
+f 85 99 41
+f 47 62 98
+f 83 104 112
+f 81 97 80
+f 43 133 39
+f 131 74 132
+f 133 123 45
+f 125 136 39
+f 126 61 66
+f 124 73 76
+f 105 54 69
+f 82 129 63
+f 129 96 127
+f 75 84 56
+f 68 121 90
+f 62 90 114
+f 87 71 69
+f 48 53 51
+f 64 53 48
+f 113 70 138
+f 44 137 108
+f 38 136 125
+f 74 50 95
+f 43 117 50
+f 112 79 58
+f 58 79 68
+f 58 68 90
+f 81 89 60
+f 89 110 121
+f 70 89 81
+f 89 70 110
+f 78 121 68
+f 89 121 101
+f 101 60 89
+f 50 74 131
+f 63 104 82
+f 125 39 46
+f 136 43 39
+f 127 54 124
+f 73 40 107
+f 124 76 86
+f 86 76 115
+f 63 129 115
+f 37 96 129
+f 37 87 96
+f 130 73 116
+f 109 37 129
+f 72 38 125
+f 117 95 50
+f 79 112 128
+f 65 70 59
+f 88 43 131
+f 131 43 50
+f 134 46 94
+f 123 133 88
+f 132 111 131
+f 111 88 131
+f 111 45 123
+f 54 105 124
+f 116 126 130
+f 46 134 107
+f 105 125 40
+f 40 125 107
+f 91 129 82
+f 109 129 91
+f 92 114 120
+f 84 44 102
+f 67 90 84
+f 67 75 120
+f 113 59 70
+f 65 59 108
+f 137 110 65
+f 65 108 137
+f 52 69 71
+f 83 62 47
+f 47 82 83
+f 62 83 58
+f 112 58 83
+f 97 81 60
+f 70 81 138
+f 70 65 110
+f 82 104 83
+f 111 123 88
+f 111 132 80
+f 45 111 80
+f 125 46 107
+f 39 94 46
+f 94 39 133
+f 115 130 66
+f 126 116 122
+f 116 134 122
+f 105 40 124
+f 124 40 73
+f 86 127 124
+f 84 75 67
+f 67 114 90
+f 103 108 51
+f 108 59 51
+f 44 103 102
+f 53 103 51
+f 55 42 72
+f 118 43 136
+f 117 43 118
+f 52 105 69
+f 91 82 47
+f 73 130 76
+f 126 66 130
+f 73 107 134
+f 116 73 134
+f 44 108 103
+f 51 59 113
+f 119 103 53
+f 49 99 85
+f 85 64 49
+f 77 99 49
+f 52 55 72
+f 99 42 71
+f 42 55 71
+f 55 52 71
+f 52 72 105
+f 117 49 95
+f 64 48 49
+f 130 115 76
+f 69 54 96
+f 96 54 127
+f 96 87 69
+f 77 117 118
+f 72 42 38
+f 99 118 42
+f 118 99 77
+f 105 72 125
+f 117 77 49
+f 48 95 49
+f 98 91 47
+f 119 102 103
+f 38 118 136
+f 98 109 91
+f 118 38 42
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/panda_pc.npy b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/panda_pc.npy
new file mode 100644
index 0000000000..24d4abeaa5
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/panda_pc.npy differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/yumi_gripper/base.stl b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/yumi_gripper/base.stl
new file mode 100644
index 0000000000..81e64db314
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/yumi_gripper/base.stl differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/yumi_gripper/base_coarse.stl b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/yumi_gripper/base_coarse.stl
new file mode 100644
index 0000000000..e88c3ac313
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/yumi_gripper/base_coarse.stl differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/yumi_gripper/finger.stl b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/yumi_gripper/finger.stl
new file mode 100644
index 0000000000..b74c6957e4
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/yumi_gripper/finger.stl differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/yumi_gripper/finger_coarse.stl b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/yumi_gripper/finger_coarse.stl
new file mode 100644
index 0000000000..f825ded082
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/gripper_models/yumi_gripper/finger_coarse.stl differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/setup.py b/dimos/models/manipulation/contact_graspnet_pytorch/setup.py
new file mode 100644
index 0000000000..3c791e15d5
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/setup.py
@@ -0,0 +1,26 @@
+try:
+ from setuptools import setup, find_packages
+except ImportError:
+ from distutils.core import setup
+from distutils.extension import Extension
+import numpy
+
+# Get numpy include directory.
+numpy_include_dir = numpy.get_include()
+
+_packages = find_packages()
+
+packages = []
+for p in _packages:
+ if p.startswith('contact_graspnet_pytorch'):
+ packages.append(p)
+
+for p in packages:
+ assert p.startswith('contact_graspnet_pytorch')
+
+setup(
+ name='contact_graspnet_pytorch',
+ author='multiple',
+ packages=packages,
+ package_data={},
+)
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/test_data/0.npy b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/0.npy
new file mode 100644
index 0000000000..33ffd921db
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/0.npy differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/test_data/1.npy b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/1.npy
new file mode 100644
index 0000000000..70a59933ac
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/1.npy differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/test_data/10.npy b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/10.npy
new file mode 100644
index 0000000000..502263f102
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/10.npy differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/test_data/11.npy b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/11.npy
new file mode 100644
index 0000000000..20abc12fbb
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/11.npy differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/test_data/12.npy b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/12.npy
new file mode 100644
index 0000000000..6fd56b2f5c
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/12.npy differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/test_data/13.npy b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/13.npy
new file mode 100644
index 0000000000..b2c10b7995
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/13.npy differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/test_data/2.npy b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/2.npy
new file mode 100644
index 0000000000..cc92bd5c97
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/2.npy differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/test_data/3.npy b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/3.npy
new file mode 100644
index 0000000000..998832a6b7
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/3.npy differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/test_data/4.npy b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/4.npy
new file mode 100644
index 0000000000..69e73aea6e
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/4.npy differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/test_data/5.npy b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/5.npy
new file mode 100644
index 0000000000..129fdc97a0
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/5.npy differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/test_data/6.npy b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/6.npy
new file mode 100644
index 0000000000..98441b916a
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/6.npy differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/test_data/7.npy b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/7.npy
new file mode 100644
index 0000000000..70ac192c47
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/7.npy differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/test_data/8.npy b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/8.npy
new file mode 100644
index 0000000000..ab1cd31f39
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/8.npy differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/test_data/9.npy b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/9.npy
new file mode 100644
index 0000000000..7db1f5e2e0
Binary files /dev/null and b/dimos/models/manipulation/contact_graspnet_pytorch/test_data/9.npy differ
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/tools/benchmark_query_kd_np_tf.py b/dimos/models/manipulation/contact_graspnet_pytorch/tools/benchmark_query_kd_np_tf.py
new file mode 100644
index 0000000000..90f472fec4
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/tools/benchmark_query_kd_np_tf.py
@@ -0,0 +1,98 @@
+from __future__ import print_function
+import sys
+import numpy as np
+from scipy.spatial import cKDTree
+import time
+import tensorflow as tf
+from tqdm import tqdm
+
+N = 5000
+k = 1
+dim = 3
+radius = 0.1
+
+np_matmul_time = []
+np_sort_time = []
+kd_build_time = []
+kd_query_time = []
+tf_time = []
+tf_pointnet_time = []
+
+@tf.function
+def tf_nn(x):
+ dists = tf.norm(tf.expand_dims(x,0)-tf.expand_dims(x,1),axis=2)
+ _,topk = tf.math.top_k(dists, k=k, sorted=False)
+ return topk
+
+# np.random.randint*
+@tf.function
+def tf_queryball(x):
+ dists = tf.norm(tf.expand_dims(x,0)-tf.expand_dims(x,1),axis=2)
+ queries = tf.math.less(dists, radius)
+ idcs = tf.where(queries)
+ return idcs
+ # _,topk = tf.math.top_k(dists, k=k, sorted=False)
+ # return topk
+
+@tf.function
+def tf_knn_max_dist(x):
+
+ squared_dists_all = tf.norm(tf.expand_dims(x,0)-tf.expand_dims(x,1),axis=2)#tf.reduce_sum((tf.expand_dims(x,0)-tf.expand_dims(x,1))**2,axis=2)
+ neg_squared_dists_k, close_contact_pt_idcs = tf.math.top_k(-squared_dists_all, k=k, sorted=False)
+ squared_dists_k = -neg_squared_dists_k
+ loss_mask_pc = tf.cast(tf.reduce_mean(squared_dists_k, axis=1) < radius**2, tf.float32)
+ return loss_mask_pc
+
+# warmup tf
+a=np.random.rand(N,dim).astype(np.float32)
+tf_queryball(a)
+# tf_pointnet(a)
+tf_knn_max_dist(a)
+
+for i in tqdm(range(10)):
+ a=np.random.rand(N,dim).astype(np.float32)
+
+ # start_time_tf = time.time()
+ # tf_time.append(time.time()-start_time_tf)
+ start_time_tf = time.time()
+ f= tf_nn(a)
+ tf_time.append(time.time()-start_time_tf)
+
+
+ start_time_tf = time.time()
+ tf_queryball(a)
+ tf_pointnet_time.append(time.time()-start_time_tf)
+
+ start_time_np = time.time()
+ d=np.linalg.norm(a[np.newaxis,:,:]-a[:,np.newaxis,:],axis=2)
+ np_matmul_time.append(time.time() - start_time_np)
+ start_time_np = time.time()
+ sorted = np.argpartition(d, k, axis=1)
+ np_sort_time.append(time.time()-start_time_np)
+
+ start_time_kd = time.time()
+ tree = cKDTree(a, leafsize=a.shape[0]+1)
+ kd_build_time.append(time.time() - start_time_kd)
+ start_time_kd = time.time()
+ distances, ndx = tree.query(a, k=k, n_jobs=-1)
+ # tree.query_ball_point(a, 0.1)
+ kd_query_time.append(time.time()-start_time_kd)
+
+print('np_matmul_time: ', np.mean(np_matmul_time))
+print('np_sort_time: ', np.mean(np_sort_time))
+print('kd_build_time: ', np.mean(kd_build_time))
+print('kd_query_time: ', np.mean(kd_query_time))
+print('#'*100)
+print('np_brute: ', np.mean(np_sort_time) + np.mean(np_matmul_time))
+print('tf_brute: ', np.mean(tf_time))
+print('tf_pointnet: ', np.mean(tf_pointnet_time))
+print('kd: ', np.mean(kd_build_time) + np.mean(kd_query_time))
+
+# tf_pointnet_query ball: 0.0019501209259033202 (N=1024, k =20, radius=0.1)
+# tf_brute_force_query_ball: 0.0015201091766357422 (N=1024, k=all, radius=0.1)
+# tf_brute_force_knn_with_dist_thres: 0.0009593963623046875 (N=1024, k =20, radius=0.1)
+# ckdtree_time_total: 0.00626897811889648 (N=1024, k =20)
+# Notes:
+# 1.) queury ball + sampling is different than knn + dist thres, but for our disjoint sets we should use the latter.
+# 2.) If you import the cuda kernels, the tf_brute_force knn becomes 3x slower (0.0024s). That means that the cu kernels also change the tf behavior in different parts of the code.
+# 3.) When the pointnet guys created their repo, tf had no gpu implementation of some functions like tf.math.top_k(). Now they have, and it seems to be faster.
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/tools/color.py b/dimos/models/manipulation/contact_graspnet_pytorch/tools/color.py
new file mode 100644
index 0000000000..01cef77e3b
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/tools/color.py
@@ -0,0 +1,12 @@
+class color:
+ PURPLE = '\033[95m'
+ CYAN = '\033[96m'
+ DARKCYAN = '\033[36m'
+ BLUE = '\033[94m'
+ GREEN = '\033[92m'
+ YELLOW = '\033[93m'
+ RED = '\033[91m'
+ BOLD = '\033[1m'
+ UNDERLINE = '\033[4m'
+ END = '\033[0m'
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/tools/create_contact_infos.py b/dimos/models/manipulation/contact_graspnet_pytorch/tools/create_contact_infos.py
new file mode 100644
index 0000000000..a05db7c95d
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/tools/create_contact_infos.py
@@ -0,0 +1,154 @@
+import os
+import numpy as np
+import h5py
+import glob
+import argparse
+import sys
+import tqdm
+from multiprocessing import Pool, cpu_count
+
+ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+sys.path.append(ROOT_DIR)
+
+from contact_graspnet_pytorch.data import PointCloudReader
+from contact_graspnet_pytorch.mesh_utils import in_collision_with_gripper, grasp_contact_location
+
+def grasps_contact_info(grasp_tfs, successfuls, obj_mesh, check_collisions=True):
+ """
+ Check the collision of the grasps and compute contact points, normals and directions
+
+ Arguments:
+ grasp_tfs {np.ndarray} -- Mx4x4 grasp transformations
+ successfuls {np.ndarray} -- Binary Mx1 successful grasps
+ obj_mesh {trimesh.base.Trimesh} -- Mesh of the object
+
+ Keyword Arguments:
+ check_collisions {bool} -- whether to check for collisions (default: {True})
+
+ Returns:
+ [dict] -- object contact dictionary with all necessary information
+ """
+ print('evaluating {} grasps'.format(len(grasp_tfs)))
+ if check_collisions:
+ collisions, _ = in_collision_with_gripper(
+ obj_mesh,
+ grasp_tfs,
+ gripper_name='panda',
+ silent=True,
+ )
+ contact_dicts = grasp_contact_location(
+ grasp_tfs,
+ successfuls,
+ collisions if check_collisions else [0]*len(successfuls),
+ object_mesh=obj_mesh,
+ gripper_name='panda',
+ silent=True,
+ )
+
+ return contact_dicts
+
+def read_object_grasp_data_acronym(root_folder, h5_path):
+ """
+ Read object grasp data from the acronym dataset and loads mesh
+
+ Arguments:
+ root_folder {str} -- root folder of acronym dataset
+ h5_path {str} -- relative path to grasp h5 file
+
+ Returns:
+ [grasps, success, cad_path, cad_scale] -- grasp trafos, grasp success, absolute mesh path, mesh scale
+ """
+
+ abs_h5_path = os.path.join(root_folder, 'grasps', h5_path)
+ data = h5py.File(abs_h5_path, "r")
+ mesh_fname = os.path.join(root_folder, data["object/file"][()].decode('utf-8'))
+ mesh_scale = data["object/scale"][()]
+ grasps = np.array(data["grasps/transforms"])
+ success = np.array(data["grasps/qualities/flex/object_in_gripper"])
+
+ positive_grasps = grasps[success==1, :, :]
+ negative_grasps = grasps[success==0, :, :]
+
+ print('positive grasps: {} negative grasps: {}'.format(positive_grasps.shape, negative_grasps.shape))
+
+ return grasps, success, mesh_fname, mesh_scale
+
+def save_contact_data(pcreader, grasp_path, target_path='mesh_contacts'):
+ """
+ Maps acronym grasp data to contact information on meshes and saves them as npz file
+
+ Arguments:
+ grasp_path {str} -- path to grasp json file
+ pcreader {Object} -- PointCloudReader instance from data.py
+
+ """
+
+ target_path = os.path.join(pcreader._root_folder, target_path)
+ if not os.path.exists(target_path):
+ os.makedirs(target_path)
+
+ contact_dir_path = os.path.join(target_path, os.path.basename(grasp_path.replace('.h5','.npz')))
+ if not os.path.exists(os.path.dirname(contact_dir_path)):
+ os.makedirs(os.path.dirname(contact_dir_path))
+ if os.path.exists(contact_dir_path):
+ return
+
+ output_grasps, output_labels, cad_path, cad_scale = read_object_grasp_data_acronym(pcreader._root_folder, grasp_path)
+ # pcreader.change_object(cad_path, cad_scale)
+ pcreader.change_scene([cad_path], [cad_scale], [np.eye(4)])
+
+ context = pcreader._renderer._cache[(cad_path,cad_scale)]
+ obj_mesh = context['tmesh']
+ obj_mesh_mean = context['mesh_mean']
+
+ output_grasps[:,:3,3] -= obj_mesh_mean
+ contact_dicts = grasps_contact_info(output_grasps, list(output_labels), obj_mesh, check_collisions=False)
+
+ contact_dict_of_arrays = {}
+ for d in contact_dicts:
+ for k in d:
+ contact_dict_of_arrays.setdefault(k,[]).append(d[k])
+
+ np.savez(contact_dir_path, **contact_dict_of_arrays)
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(description="Grasp data reader")
+ parser.add_argument('root_folder', help='Root dir with acronym grasps, meshes and splits', type=str)
+ args = parser.parse_args()
+ print('Root folder', args.root_folder)
+
+ pcreader = PointCloudReader(root_folder=args.root_folder)
+
+ grasp_paths = glob.glob(os.path.join(args.root_folder, 'grasps', '*.h5'))
+ grasp_paths = [grasp_path.split('/')[-1] for grasp_path in grasp_paths]
+
+ # -- Remove objects that failed to waterproof -- #
+ with open(os.path.join(args.root_folder, 'failed.txt'), 'r') as f:
+ failed_meshes = [line.strip() for line in f.readlines()]
+
+ good_grasp_paths = []
+ for grasp_path in grasp_paths:
+ shapenet_id = grasp_path.split('/')[-1].split('_')[1]
+ if shapenet_id not in failed_meshes:
+ good_grasp_paths.append(grasp_path)
+ else:
+ print('Skipping failed mesh', shapenet_id)
+
+ grasp_paths = good_grasp_paths
+
+ print('Computing grasp contacts...')
+ for grasp_path in tqdm.tqdm(grasp_paths):
+ print('Reading: ', grasp_path)
+ save_contact_data(pcreader, grasp_path)
+
+ # def save_contact_data_wrapper(grasp_path):
+ # save_contact_data(pcreader, grasp_path)
+
+ # with Pool(cpu_count() - 4) as p:
+ # examples = list(
+ # tqdm.tqdm(
+ # p.imap_unordered(save_contact_data_wrapper,
+ # grasp_paths),
+ # total=len(grasp_paths)
+ # )
+ # )
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/tools/create_table_top_scenes.py b/dimos/models/manipulation/contact_graspnet_pytorch/tools/create_table_top_scenes.py
new file mode 100644
index 0000000000..deab73b4f6
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/tools/create_table_top_scenes.py
@@ -0,0 +1,511 @@
+import glob
+import os
+import random
+import trimesh
+import numpy as np
+import json
+import argparse
+import signal
+import trimesh.transformations as tra
+
+from acronym_tools import Scene, load_mesh, create_gripper_marker
+
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+def load_contacts(root_folder, data_splits, splits=['train'], min_pos_contacts=1):
+ """
+ Load grasps and contacts into memory
+
+ Arguments:
+ root_folder {str} -- path to acronym data
+ data_splits {dict} -- dict of categories of train/test object grasp files
+
+ Keyword Arguments:
+ splits {list} -- object data split(s) to use for scene generation
+ min_pos_contacts {int} -- minimum successful grasp contacts to consider object
+
+ Returns:
+ [dict] -- h5 file names as keys and grasp transforms + contact points as values
+ """
+ contact_infos = {}
+ for category_paths in data_splits.values():
+ for split in splits:
+ for grasp_path in category_paths[split]:
+ contact_path = os.path.join(root_folder, 'mesh_contacts', grasp_path.replace('.h5','.npz'))
+ if os.path.exists(contact_path):
+ npz = np.load(contact_path)
+ if 'contact_points' in npz:
+ all_contact_suc = npz['successful'].reshape(-1)
+ pos_idcs = np.where(all_contact_suc>0)[0]
+ if len(pos_idcs) > min_pos_contacts:
+ contact_infos[grasp_path] = {}
+ contact_infos[grasp_path]['successful'] = npz['successful']
+ contact_infos[grasp_path]['grasp_transform'] = npz['grasp_transform']
+ contact_infos[grasp_path]['contact_points'] = npz['contact_points']
+ if not contact_infos:
+ print('Warning: No mesh_contacts found. Please run tools/create_contact_infos.py first!')
+ return contact_infos
+
+def load_splits(root_folder):
+ """
+ Load splits of training and test objects
+
+ Arguments:
+ root_folder {str} -- path to acronym data
+
+ Returns:
+ [dict] -- dict of category-wise train/test object grasp files
+ """
+ split_dict = {}
+ split_paths = glob.glob(os.path.join(root_folder, 'splits/*.json'))
+ for split_p in split_paths:
+ category = os.path.basename(split_p).split('.json')[0]
+ splits = json.load(open(split_p,'r'))
+ split_dict[category] = {}
+ split_dict[category]['train'] = [obj_p.replace('.json', '.h5') for obj_p in splits['train']]
+ split_dict[category]['test'] = [obj_p.replace('.json', '.h5') for obj_p in splits['test']]
+ return split_dict
+
+class TableScene(Scene):
+ """
+ Holds current table-top scene, samples object poses and checks grasp collisions.
+
+ Arguments:
+ root_folder {str} -- path to acronym data
+ gripper_path {str} -- relative path to gripper collision mesh
+
+ Keyword Arguments:
+ lower_table {float} -- lower table to permit slight grasp collisions between table and object/gripper (default: {0.02})
+ """
+
+ def __init__(self, root_folder, gripper_path, lower_table=0.02, splits=['train']):
+
+ super().__init__()
+ self.root_folder = root_folder
+ self.splits= splits
+ self.gripper_mesh = trimesh.load(os.path.join(BASE_DIR, gripper_path))
+
+ self._table_dims = [1.0, 1.2, 0.6]
+ self._table_support = [0.6, 0.6, 0.6]
+ self._table_pose = np.eye(4)
+ self.table_mesh = trimesh.creation.box(self._table_dims)
+ self.table_support = trimesh.creation.box(self._table_support)
+
+ self.data_splits = load_splits(root_folder)
+ self.category_list = list(self.data_splits.keys())
+ self.contact_infos = load_contacts(root_folder, self.data_splits, splits=self.splits)
+
+ self._lower_table = lower_table
+
+ self._scene_count = 0
+
+ def get_random_object(self):
+
+ """Return random scaled but not yet centered object mesh
+
+ Returns:
+ [trimesh.Trimesh, str] -- ShapeNet mesh from a random category, h5 file path
+ """
+
+ while True:
+ random_category = random.choice(self.category_list)
+ cat_obj_paths = [obj_p for split in self.splits for obj_p in self.data_splits[random_category][split]]
+ if cat_obj_paths:
+ random_grasp_path = random.choice(cat_obj_paths)
+ if random_grasp_path in self.contact_infos:
+ break
+
+ obj_mesh = load_mesh(os.path.join(self.root_folder, 'grasps', random_grasp_path), self.root_folder)
+
+ mesh_mean = np.mean(obj_mesh.vertices, 0, keepdims=True)
+ obj_mesh.vertices -= mesh_mean
+
+ return obj_mesh, random_grasp_path
+
+ def _get_random_stable_pose(self, stable_poses, stable_poses_probs, thres=0.005):
+ """Return a stable pose according to their likelihood.
+
+ Args:
+ stable_poses (list[np.ndarray]): List of stable poses as 4x4 matrices.
+ stable_poses_probs (list[float]): List of probabilities.
+ thres (float): Threshold of pose stability to include for sampling
+
+ Returns:
+ np.ndarray: homogeneous 4x4 matrix
+ """
+
+
+ # Random pose with unique (avoid symmetric poses) stability prob > thres
+ _,unique_idcs = np.unique(stable_poses_probs.round(decimals=3), return_index=True)
+ unique_idcs = unique_idcs[::-1]
+ unique_stable_poses_probs = stable_poses_probs[unique_idcs]
+ n = len(unique_stable_poses_probs[unique_stable_poses_probs>thres])
+ index = unique_idcs[np.random.randint(n)]
+
+ # index = np.random.choice(len(stable_poses), p=stable_poses_probs)
+ inplane_rot = tra.rotation_matrix(
+ angle=np.random.uniform(0, 2.0 * np.pi), direction=[0, 0, 1]
+ )
+ return inplane_rot.dot(stable_poses[index])
+
+ def find_object_placement(self, obj_mesh, max_iter):
+ """Try to find a non-colliding stable pose on top of any support surface.
+
+ Args:
+ obj_mesh (trimesh.Trimesh): Object mesh to be placed.
+ max_iter (int): Maximum number of attempts to place to object randomly.
+
+ Raises:
+ RuntimeError: In case the support object(s) do not provide any support surfaces.
+
+ Returns:
+ bool: Whether a placement pose was found.
+ np.ndarray: Homogenous 4x4 matrix describing the object placement pose. Or None if none was found.
+ """
+ support_polys, support_T = self._get_support_polygons()
+ if len(support_polys) == 0:
+ raise RuntimeError("No support polygons found!")
+
+ # get stable poses for object
+ stable_obj = obj_mesh.copy()
+ stable_obj.vertices -= stable_obj.center_mass
+ stable_poses, stable_poses_probs = stable_obj.compute_stable_poses(
+ threshold=0, sigma=0, n_samples=20
+ )
+ # stable_poses, stable_poses_probs = obj_mesh.compute_stable_poses(threshold=0, sigma=0, n_samples=1)
+
+ # Sample support index
+ support_index = max(enumerate(support_polys), key=lambda x: x[1].area)[0]
+
+ iter = 0
+ colliding = True
+ while iter < max_iter and colliding:
+
+ # Sample position in plane
+ pts = trimesh.path.polygons.sample(
+ support_polys[support_index], count=1
+ )
+
+ # To avoid collisions with the support surface
+ pts3d = np.append(pts, 0)
+
+ # Transform plane coordinates into scene coordinates
+ placement_T = np.dot(
+ support_T[support_index],
+ trimesh.transformations.translation_matrix(pts3d),
+ )
+
+ pose = self._get_random_stable_pose(stable_poses, stable_poses_probs)
+
+ placement_T = np.dot(
+ np.dot(placement_T, pose), tra.translation_matrix(-obj_mesh.center_mass)
+ )
+
+ # Check collisions
+ colliding = self.is_colliding(obj_mesh, placement_T)
+
+ iter += 1
+
+ return not colliding, placement_T if not colliding else None
+
+ def is_colliding(self, mesh, transform, eps=1e-6):
+ """
+ Whether given mesh collides with scene
+
+ Arguments:
+ mesh {trimesh.Trimesh} -- mesh
+ transform {np.ndarray} -- mesh transform
+
+ Keyword Arguments:
+ eps {float} -- minimum distance detected as collision (default: {1e-6})
+
+ Returns:
+ [bool] -- colliding or not
+ """
+ dist = self.collision_manager.min_distance_single(mesh, transform=transform)
+ return dist < eps
+
+ def load_suc_obj_contact_grasps(self, grasp_path):
+ """
+ Loads successful object grasp contacts
+
+ Arguments:
+ grasp_path {str} -- acronym grasp path
+
+ Returns:
+ [np.ndarray, np.ndarray] -- Mx4x4 grasp transforms, Mx3 grasp contacts
+ """
+ contact_info = self.contact_infos[grasp_path]
+
+ suc_grasps = contact_info['successful'].reshape(-1)
+ gt_grasps = contact_info['grasp_transform'].reshape(-1,4,4)
+ gt_contacts = contact_info['contact_points'].reshape(-1,3)
+
+ suc_gt_contacts = gt_contacts[np.repeat(suc_grasps,2)>0]
+ suc_gt_grasps = gt_grasps[suc_grasps>0]
+
+ return suc_gt_grasps, suc_gt_contacts
+
+ def set_mesh_transform(self, name, transform):
+ """
+ Set mesh transform for collision manager
+
+ Arguments:
+ name {str} -- mesh name
+ transform {np.ndarray} -- 4x4 homog mesh pose
+ """
+ self.collision_manager.set_transform(name, transform)
+ self._poses[name] = transform
+
+ def save_scene_grasps(self, output_dir, scene_filtered_grasps, scene_filtered_contacts, obj_paths, obj_transforms, obj_scales, obj_grasp_idcs):
+ """
+ Save scene_contact infos in output_dir
+
+ Arguments:
+ output_dir {str} -- absolute output directory
+ scene_filtered_grasps {np.ndarray} -- Nx4x4 filtered scene grasps
+ scene_filtered_contacts {np.ndarray} -- Nx2x3 filtered scene contacts
+ obj_paths {list} -- list of object paths in scene
+ obj_transforms {list} -- list of object transforms in scene
+ obj_scales {list} -- list of object scales in scene
+ obj_grasp_idcs {list} -- list of starting grasp idcs for each object
+ """
+ if not os.path.exists(output_dir):
+ os.makedirs(output_dir)
+
+ contact_info = {}
+ contact_info['obj_paths'] = obj_paths
+ contact_info['obj_transforms'] = obj_transforms
+ contact_info['obj_scales'] = obj_scales
+ contact_info['grasp_transforms'] = scene_filtered_grasps
+ contact_info['scene_contact_points'] = scene_filtered_contacts
+ contact_info['obj_grasp_idcs'] = np.array(obj_grasp_idcs)
+ output_path = os.path.join(output_dir, '{:06d}.npz'.format(self._scene_count))
+ while os.path.exists(output_path):
+ self._scene_count += 1
+ output_path = os.path.join(output_dir, '{:06d}.npz'.format(self._scene_count))
+ np.savez(output_path, **contact_info)
+ self._scene_count += 1
+
+ def _transform_grasps(self, grasps, contacts, obj_transform):
+ """
+ Transform grasps and contacts into given object transform
+
+ Arguments:
+ grasps {np.ndarray} -- Nx4x4 grasps
+ contacts {np.ndarray} -- 2Nx3 contacts
+ obj_transform {np.ndarray} -- 4x4 mesh pose
+
+ Returns:
+ [np.ndarray, np.ndarray] -- transformed grasps and contacts
+ """
+ transformed_grasps = np.matmul(obj_transform, grasps)
+ contacts_homog = np.concatenate((contacts, np.ones((contacts.shape[0], 1))),axis=1)
+ transformed_contacts = np.dot(contacts_homog, obj_transform.T)[:,:3]
+ return transformed_grasps, transformed_contacts
+
+ def _filter_colliding_grasps(self, transformed_grasps, transformed_contacts):
+ """
+ Filter out colliding grasps
+
+ Arguments:
+ transformed_grasps {np.ndarray} -- Nx4x4 grasps
+ transformed_contacts {np.ndarray} -- 2Nx3 contact points
+
+ Returns:
+ [np.ndarray, np.ndarray] -- Mx4x4 filtered grasps, Mx2x3 filtered contact points
+ """
+ filtered_grasps = []
+ filtered_contacts = []
+ for i,g in enumerate(transformed_grasps):
+ if not self.is_colliding(self.gripper_mesh, g):
+ filtered_grasps.append(g)
+ filtered_contacts.append(transformed_contacts[2*i:2*(i+1)])
+ return np.array(filtered_grasps).reshape(-1,4,4), np.array(filtered_contacts).reshape(-1,2,3)
+
+ def reset(self):
+ """
+ Reset, i.e. remove scene objects
+ """
+ for name in self._objects:
+ self.collision_manager.remove_object(name)
+ self._objects = {}
+ self._poses = {}
+ self._support_objects = []
+
+ def load_existing_scene(self, path):
+ """
+ Load an existing scene_contacts scene for visualization
+
+ Arguments:
+ path {str} -- abs path to scene_contacts npz file
+
+ Returns:
+ [np.ndarray, list, list] -- scene_grasps, list of obj paths, list of object transforms
+ """
+ self.add_object('table', self.table_mesh, self._table_pose)
+ self._support_objects.append(self.table_support)
+
+ inp = np.load(os.path.join(self.root_folder, path))
+ scene_filtered_grasps = inp['grasp_transforms']
+ scene_contacts = inp['scene_contact_points']
+ obj_transforms = inp['obj_transforms']
+ obj_paths = inp['obj_paths']
+ obj_scales = inp['obj_scales']
+
+ for obj_path,obj_transform,obj_scale in zip(obj_paths,obj_transforms,obj_scales):
+ obj_mesh = trimesh.load(os.path.join(self.root_folder, obj_path))
+ obj_mesh = obj_mesh.apply_scale(obj_scale)
+ mesh_mean = np.mean(obj_mesh.vertices, 0, keepdims=True)
+ obj_mesh.vertices -= mesh_mean
+ self.add_object(obj_path, obj_mesh, obj_transform)
+ return scene_filtered_grasps, scene_contacts, obj_paths, obj_transforms
+
+
+ def handler(self, signum, frame):
+ raise Exception("Could not place object ")
+
+ def arrange(self, num_obj, max_iter=100, time_out = 8):
+ """
+ Arrange random table top scene with contact grasp annotations
+
+ Arguments:
+ num_obj {int} -- number of objects
+
+ Keyword Arguments:
+ max_iter {int} -- maximum iterations to try placing an object (default: {100})
+ time_out {int} -- maximum time to try placing an object (default: {8})
+
+ Returns:
+ [np.ndarray, np.ndarray, list, list, list, list] --
+ scene_filtered_grasps, scene_filtered_contacts, obj_paths, obj_transforms, obj_scales, obj_grasp_idcs
+
+ """
+
+ self._table_pose[2,3] -= self._lower_table
+ self.add_object('table', self.table_mesh, self._table_pose)
+
+ self._support_objects.append(self.table_support)
+
+ obj_paths = []
+ obj_transforms = []
+ obj_scales = []
+ grasp_paths = []
+
+ for i in range(num_obj):
+ obj_mesh, random_grasp_path = self.get_random_object()
+ signal.signal(signal.SIGALRM, self.handler)
+ signal.alarm(8)
+ try:
+ success, placement_T = self.find_object_placement(obj_mesh, max_iter)
+ except Exception as exc:
+ print(exc, random_grasp_path, " after {} seconds!".format(time_out))
+ continue
+ signal.alarm(0)
+ if success:
+ self.add_object(random_grasp_path, obj_mesh, placement_T)
+ obj_scales.append(float(random_grasp_path.split('_')[-1].split('.h5')[0]))
+ obj_paths.append(os.path.join('meshes', '/'.join(random_grasp_path.split('_')[:2]) + '.obj'))
+ obj_transforms.append(placement_T)
+ grasp_paths.append(random_grasp_path)
+ else:
+ print("Couldn't place object", random_grasp_path, " after {} iterations!".format(max_iter))
+ print('Placed {} objects'.format(len(obj_paths)))
+
+ # self.set_mesh_transform('table', self._table_pose)
+
+ scene_filtered_grasps = []
+ scene_filtered_contacts = []
+ obj_grasp_idcs = []
+ grasp_count = 0
+
+ for obj_transform, grasp_path in zip(obj_transforms, grasp_paths):
+ grasps, contacts = self.load_suc_obj_contact_grasps(grasp_path)
+ transformed_grasps, transformed_contacts = self._transform_grasps(grasps, contacts, obj_transform)
+ filtered_grasps, filtered_contacts = self._filter_colliding_grasps(transformed_grasps, transformed_contacts)
+
+ scene_filtered_grasps.append(filtered_grasps)
+ scene_filtered_contacts.append(filtered_contacts)
+ grasp_count += len(filtered_contacts)
+ obj_grasp_idcs.append(grasp_count)
+
+ scene_filtered_grasps = np.concatenate(scene_filtered_grasps,0)
+ scene_filtered_contacts = np.concatenate(scene_filtered_contacts,0)
+
+ self._table_pose[2,3] += self._lower_table
+ self.set_mesh_transform('table', self._table_pose)
+
+ return scene_filtered_grasps, scene_filtered_contacts, obj_paths, obj_transforms, obj_scales, obj_grasp_idcs
+
+ def visualize(self, scene_grasps, scene_contacts=None):
+ """
+ Visualizes table top scene with grasps
+
+ Arguments:
+ scene_grasps {np.ndarray} -- Nx4x4 grasp transforms
+ scene_contacts {np.ndarray} -- Nx2x3 grasp contacts
+ """
+ print('Visualizing scene and grasps.. takes time')
+
+ gripper_marker = create_gripper_marker(color=[0, 255, 0])
+ gripper_markers = [gripper_marker.copy().apply_transform(t) for t in scene_grasps]
+
+ colors = np.ones((scene_contacts.shape[0]*2,4))*255
+ colors[:,0:2] = 0
+ scene_contact_scene = trimesh.Scene(trimesh.points.PointCloud(scene_contacts.reshape(-1,3), colors=colors))
+
+ # show scene together with successful and collision-free grasps of all objects
+ trimesh.scene.scene.append_scenes(
+ [self.colorize().as_trimesh_scene(), trimesh.Scene(gripper_markers), scene_contact_scene]
+ ).show()
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser(description="Grasp data reader")
+ parser.add_argument('root_folder', help='Root dir with grasps, meshes, mesh_contacts and splits', type=str)
+ parser.add_argument('--num_grasp_scenes', type=int, default=10000)
+ parser.add_argument('--splits','--list', nargs='+')
+ parser.add_argument('--max_iterations', type=int, default=100)
+ parser.add_argument('--gripper_path', type=str, default='gripper_models/panda_gripper/panda_gripper.obj')
+ parser.add_argument('--min_num_objects', type=int, default=8)
+ parser.add_argument('--max_num_objects', type=int, default=12)
+ parser.add_argument('--start_index', type=int, default=0)
+ parser.add_argument('--load_existing', type=str, default=None)
+ parser.add_argument('--output_dir', type=str, default='scene_contacts')
+ parser.add_argument('-vis', action='store_true', default=False)
+ args = parser.parse_args()
+
+ root_folder = args.root_folder
+ splits = args.splits if args.splits else ['train']
+ max_iterations = args.max_iterations
+ gripper_path = args.gripper_path
+ number_of_scenes = args.num_grasp_scenes
+ min_num_objects = args.min_num_objects
+ max_num_objects = args.max_num_objects
+ start_index = args.start_index
+ load_existing = args.load_existing
+ output_dir = args.output_dir
+ visualize = args.vis
+
+ table_scene = TableScene(root_folder, gripper_path, splits=splits)
+ table_scene._scene_count = start_index
+
+ print('Root folder', args.root_folder)
+ output_dir = os.path.join(root_folder, output_dir)
+
+ while table_scene._scene_count < number_of_scenes:
+
+ table_scene.reset()
+
+ if load_existing is None:
+ print('generating %s/%s' % (table_scene._scene_count, number_of_scenes))
+ num_objects = np.random.randint(min_num_objects,max_num_objects+1)
+ scene_grasps, scene_contacts, obj_paths, obj_transforms, obj_scales, obj_grasp_idcs = table_scene.arrange(num_objects, max_iterations)
+ if not visualize:
+ table_scene.save_scene_grasps(output_dir, scene_grasps, scene_contacts, obj_paths, obj_transforms, obj_scales, obj_grasp_idcs)
+ else:
+ scene_grasps,scene_contacts, _,_ = table_scene.load_existing_scene(load_existing)
+
+ if visualize:
+ table_scene.visualize(scene_grasps, scene_contacts)
+ table_scene._scene_count +=1
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/tools/dict_diff.py b/dimos/models/manipulation/contact_graspnet_pytorch/tools/dict_diff.py
new file mode 100644
index 0000000000..2a1a9e11e1
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/tools/dict_diff.py
@@ -0,0 +1,28 @@
+from easydict import EasyDict as edict
+import copy
+
+def findDiff(d1, d2, path="", diff_dict = {}):
+ orig_path = copy.deepcopy(path)
+ for k in d1.keys():
+ if not d2.has_key(k):
+ print path, ":"
+ print "keys not in d2: " + k, "\n"
+ else:
+ if type(d1[k]) in [edict, dict]:
+ if path == "":
+ path = k
+ else:
+ path = path + "->" + k
+ diff_dict = findDiff(d1[k],d2[k], path, diff_dict)
+ path = orig_path
+ else:
+ if d1[k] != d2[k]:
+ print path, ":"
+ print " - ", k," : ", d1[k]
+ print " + ", k," : ", d2[k]
+ diff_dict[k] = d2[k]
+ diff_dict[k + '_dictpath'] = copy.deepcopy(path)
+ # path=""
+
+ return diff_dict
+
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/tools/plot_pr.py b/dimos/models/manipulation/contact_graspnet_pytorch/tools/plot_pr.py
new file mode 100644
index 0000000000..8e77b692a2
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/tools/plot_pr.py
@@ -0,0 +1,237 @@
+import numpy as np
+import matplotlib.pyplot as plt
+from dict_diff import findDiff
+import sys
+import glob2
+import os
+import yaml
+sys.path.append('/home/msundermeyer/ngc_ws/6dof-graspnet/contact_graspnet')
+import utilities
+import copy
+from color import color
+from scipy import spatial
+
+
+def metric_coverage_success_rate(grasps_list, scores_list, flex_outcomes_list, gt_grasps_list, visualize, num_scenes=100):
+ """
+ Computes the coverage success rate for grasps of multiple objects.
+
+ Args:
+ grasps_list: list of numpy array, each numpy array is the predicted
+ grasps for each object. Each numpy array has shape (n, 4, 4) where
+ n is the number of predicted grasps for each object.
+ scores_list: list of numpy array, each numpy array is the predicted
+ scores for each grasp of the corresponding object.
+ flex_outcomes_list: list of numpy array, each element of the numpy
+ array indicates whether that grasp succeeds in grasping the object
+ or not.
+ gt_grasps_list: list of numpy array. Each numpy array has shape of
+ (m, 4, 4) where m is the number of groundtruth grasps for each
+ object.
+ visualize: bool. If True, it will plot the curve.
+
+ Returns:
+ auc: float, area under the curve for the success-coverage plot.
+ """
+ RADIUS = 0.02
+
+ all_trees = []
+ all_grasps = []
+ all_object_indexes = []
+ all_scores = []
+ all_flex_outcomes = []
+ visited = set()
+ tot_num_gt_grasps = 0
+ for i in range(num_scenes):
+ print('building kd-tree {}/{}'.format(i+1, len(grasps_list[:num_scenes])))
+ gt_grasps = np.asarray(gt_grasps_list[i]).copy()
+ all_trees.append(spatial.KDTree(gt_grasps[:, :3, 3]))
+ tot_num_gt_grasps += gt_grasps.shape[0]
+ print(np.mean(flex_outcomes_list[i]))
+ try:
+ print(len(grasps_list[i]), len(scores_list[i].reshape(-1)), len(flex_outcomes_list[i]))
+ except:
+ import pdb; pdb.set_trace()
+ for g, s, f in zip(grasps_list[i], scores_list[i].reshape(-1), flex_outcomes_list[i]):
+ all_grasps.append(np.asarray(g).copy())
+ all_object_indexes.append(i)
+ all_scores.append(s)
+ all_flex_outcomes.append(f)
+
+ all_grasps = np.asarray(all_grasps)
+
+ all_scores = np.asarray(all_scores)
+ order = np.argsort(-all_scores)
+ num_covered_so_far = 0
+ correct_grasps_so_far = 0
+ num_visited_grasps_so_far = 0
+
+ precisions = []
+ recalls = []
+ prev_score = None
+
+ for oindex, index in enumerate(order):
+ if oindex % 1000 == 0:
+ print(oindex, len(order))
+
+ object_id = all_object_indexes[index]
+ close_indexes = all_trees[object_id].query_ball_point(all_grasps[index, :3, 3], RADIUS)
+
+ num_new_covered_gt_grasps = 0
+
+ for close_index in close_indexes:
+ key = (object_id, close_index)
+ if key in visited:
+ continue
+
+ visited.add(key)
+ num_new_covered_gt_grasps += 1
+
+ correct_grasps_so_far += all_flex_outcomes[index]
+ num_visited_grasps_so_far += 1
+ num_covered_so_far += num_new_covered_gt_grasps
+ if prev_score is not None and abs(prev_score - all_scores[index]) < 1e-3:
+ precisions[-1] = float(correct_grasps_so_far) / num_visited_grasps_so_far
+ recalls[-1] = float(num_covered_so_far) / tot_num_gt_grasps
+ else:
+ precisions.append(float(correct_grasps_so_far) / num_visited_grasps_so_far)
+ recalls.append(float(num_covered_so_far) / tot_num_gt_grasps)
+ prev_score = all_scores[index]
+
+ auc = 0
+ for i in range(1, len(precisions)):
+ auc += (recalls[i] - recalls[i-1]) * (precisions[i] + precisions[i-1]) * 0.5
+
+ if visualize:
+ import matplotlib.pyplot as plt
+ plt.plot(recalls, precisions)
+ plt.title('Simulator Precision-Coverage Curves auc = {0:02f}'.format(auc))
+ plt.xlabel('Coverage')
+ plt.ylabel('Precision')
+ plt.xlim([0, 0.6])
+ plt.ylim([0,1.0])
+ # plt.show()
+
+
+ return auc, {'precisions': precisions, 'recalls': recalls, 'auc': auc, 'cfg': None}
+
+# pr_data = glob2.glob(os.path.join(sys.argv[1],'*','*','outfile.npy')) + glob2.glob(os.path.join(sys.argv[1],'*','outfile.npy')) + glob2.glob(os.path.join(sys.argv[1],'outfile.npy'))
+pr_data = glob2.glob(os.path.join(sys.argv[1],'*','*','all_full_results.npz')) + glob2.glob(os.path.join(sys.argv[1],'*','all_full_results.npz')) + glob2.glob(os.path.join(sys.argv[1],'all_full_results.npz'))
+
+default_compare = True
+if default_compare:
+ default_config = utilities.load_config('/home/msundermeyer/ngc_ws/6dof-graspnet/contact_graspnet')
+else:
+ default_config = np.load(pr_data[0], allow_pickle=True).item()['cfg']
+
+legends = []
+all_diff_dicts = {}
+cfgs = {}
+aucs_01 = {}
+name_dict = {}
+
+gt_grasps = []
+for p in range(100):
+ y=np.load('/home/msundermeyer/datasets/visibility_filtered_gt_grasp/{}_filtered_gt_grasps.npz'.format(p), allow_pickle=True)
+ gt_grasps.append(y['gt_grasp_scene_trafos'])
+
+
+
+for abc in pr_data:
+ try:
+ x=np.load(os.path.join(os.path.dirname(abc), 'flex_temp', 'all_full_results.npz'), allow_pickle=True)
+ except:
+ x=np.load(os.path.join(os.path.dirname(os.path.dirname(abc)), 'flex_temp', 'all_full_results.npz'), allow_pickle=True)
+
+ auc, _ = metric_coverage_success_rate(x['grasps'], x['scores'], x['flex_outcomes'], gt_grasps, True)
+
+ base_dir = os.path.dirname(os.path.dirname(abc))
+ outfile_data = glob2.glob(os.path.join(base_dir,'*','*','outfile.npy')) + glob2.glob(os.path.join(base_dir,'*','outfile.npy')) + glob2.glob(os.path.join(base_dir,'outfile.npy'))
+
+ if outfile_data:
+ d = outfile_data[0]
+ print(d)
+ a=np.load(d, allow_pickle=True).item()
+
+ if d.split('/')[2] == 'outfile.npy':
+ names = os.listdir(os.path.dirname(d))
+ print(names)
+ name = names[0]
+ else:
+ try:
+ name = d.split('/')[5]
+ except:
+ name = d.split('/')[4]
+
+ print(50*'#')
+ print(name)
+ all_diff_dicts[name] = copy.deepcopy(findDiff(default_config, a['cfg'], diff_dict={}))
+ cfgs[name] = a['cfg']
+ print(all_diff_dicts)
+
+ b=[]
+ for g in x['scores']:
+ b += list(g)
+ print(np.histogram(b))
+ i = np.argsort(b)
+ o = []
+ for g in x['flex_outcomes']:
+ o += list(g)
+ # np.histogram(np.array(o)[i[-1000:]])
+ # np.histogram(np.array(o)[i[-4000:-2000]])
+ # np.histogram(np.array(o)[i[-4000:-3000]])
+ print(np.histogram(o))
+ print(a['auc'])
+ aucs_01[name] = a['auc']
+ print(50*'#')
+ recalls = np.array(a['recalls'])
+ precisions = np.array(a['precisions'])
+ recalls_01 = recalls[recalls<0.1]
+ precisions_01 = precisions[recalls<0.1]
+
+ deltas = recalls_01[1:]-recalls_01[:-1]
+ aucs_01[name] = np.dot(deltas,precisions_01[:-1])*10
+
+
+ # plt.plot(a['recalls'], a['precisions'])
+ legends.append(d.split('/')[1] + ' - ' + name + ' (auc: {:.4f})'.format(auc))
+ # legends.append(d.split('/')[1] + ' - ' + name + ' (auc: {:.4f})'.format(a['auc']) + ' (auc01: {:.4f})'.format(aucs_01[name]))
+ else:
+ legends.append(base_dir)
+
+
+all_changed_keys = {k:v for d in all_diff_dicts for k,v in all_diff_dicts[d].items()}
+stri = (1+len(all_diff_dicts))*"{:<10}"
+
+print("{:<30}".format("Parameter") + stri.format('default' ,*[d for d in all_diff_dicts]))
+for k,v in all_changed_keys.items():
+ if '_dictpath' in k:
+ keys = v.split('->')
+ cfg_tmp = copy.deepcopy(default_config)
+ if keys[0] == "":
+ continue
+ for k1 in keys:
+ cfg_tmp = cfg_tmp[k1]
+
+ string = "{:<30} {:<10}".format(k[:-9], cfg_tmp[k[:-9]])
+ for d in all_diff_dicts:
+ if k in all_diff_dicts[d]:
+ value = all_diff_dicts[d][k[:-9]]
+ string += color.GREEN + "{:<10}".format(value) + color.END
+ else:
+ value = cfg_tmp[k[:-9]]
+ string += "{:<10}".format(value)
+
+ print(string)
+
+
+
+plt.title('Simulator Precision-Coverage Curves')
+plt.legend(legends)
+plt.xlabel('Coverage')
+plt.ylabel('Precision')
+plt.xlim([0, 0.6])
+plt.ylim([0,1.0])
+
+plt.show()
+
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/tools/ply_to_npz.py b/dimos/models/manipulation/contact_graspnet_pytorch/tools/ply_to_npz.py
new file mode 100644
index 0000000000..d42cc531b6
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/tools/ply_to_npz.py
@@ -0,0 +1,35 @@
+import numpy as np
+import sys
+
+def read_ply_file(ply_file, load_cols=True):
+ """Naive conversion from ply to numpy arrays
+
+ Arguments:
+ ply_file {str} -- [description]
+
+ Keyword Arguments:
+ load_cols {bool} -- load vertex colors (default: {True})
+
+ Returns:
+ dict -- vertex coordinates and optionally colors
+ """
+ ret_dict = {}
+ assert ply_file.endswith('.ply')
+ with open(ply_file, 'r') as f:
+ lines = f.readlines()
+ verts_num = int(lines[3].split(' ')[-1])
+ verts_lines = lines[11:11 + 2*verts_num:2]
+ ret_dict['xyz'] = np.array([list(map(float, l.strip().split(' '))) for l in verts_lines])
+ if load_cols:
+ cols_lines = lines[12:12 + 2*verts_num:2]
+ ret_dict['xyz_color'] = np.array([list(map(int, l.strip().split(' '))) for l in cols_lines])
+
+ return ret_dict
+
+file_name = sys.argv[1]
+ret_dict = read_ply_file(file_name)
+
+# OpenGL to OpenCV
+ret_dict['xyz'][:,1:] = -ret_dict['xyz'][:,1:]
+
+np.savez(file_name.replace('.ply','.npz'), xyz=ret_dict['xyz'], xyz_color=ret_dict['xyz_color'])
\ No newline at end of file
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/tools/refactor_mesh_dir.py b/dimos/models/manipulation/contact_graspnet_pytorch/tools/refactor_mesh_dir.py
new file mode 100644
index 0000000000..8dcb7310a9
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/tools/refactor_mesh_dir.py
@@ -0,0 +1,28 @@
+import os
+import tqdm
+
+
+BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+ACRONYM_PATH = os.path.join(BASE_PATH, 'acronym')
+GRASPDIR = os.path.join(ACRONYM_PATH, "grasps")
+MESHDIR = os.path.join(ACRONYM_PATH, "meshes")
+
+with open(os.path.join(ACRONYM_PATH, 'failed.txt'), 'r') as f:
+ failed_meshes = f.readlines()
+ failed_meshes = [line.strip() for line in failed_meshes]
+
+for fname in tqdm.tqdm(os.listdir(GRASPDIR)):
+ tokens = fname.split("_")
+ assert(len(tokens) == 3)
+ obj_type = tokens[0]
+ obj_hash = tokens[1]
+ obj_scale = tokens[2].split(".")[0]
+
+ if not os.path.exists(os.path.join(MESHDIR, f'{obj_type}')):
+ os.makedirs(os.path.join(MESHDIR, f'{obj_type}'))
+ # move object to folder
+
+ ori_path = os.path.join(MESHDIR, f'{obj_hash}.obj')
+ new_path = os.path.join(MESHDIR, f'{obj_type}', f'{obj_hash}.obj')
+ if not os.path.exists(new_path) and obj_hash not in failed_meshes:
+ os.rename(ori_path, new_path)
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/tools/waterproof_meshes.py b/dimos/models/manipulation/contact_graspnet_pytorch/tools/waterproof_meshes.py
new file mode 100644
index 0000000000..5652ca6713
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/tools/waterproof_meshes.py
@@ -0,0 +1,143 @@
+"""
+convert_meshes.py
+Process the .obj files and make them waterproof and simplified.
+
+Based on: https://github.com/NVlabs/acronym/issues/6
+"""
+import os
+import glob
+import yaml
+import subprocess
+from tqdm import tqdm
+from multiprocessing import Pool, cpu_count
+
+BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+ACRONYM_PATH = os.path.join(BASE_PATH, 'acronym')
+GRASPDIR = os.path.join(ACRONYM_PATH, "grasps")
+MANIFOLD_PATH = os.path.join(ACRONYM_PATH, "Manifold", "build", "manifold")
+SIMPLIFY_PATH = os.path.join(ACRONYM_PATH, "Manifold", "build", "simplify")
+
+# -- Create log for failed files -- #
+failed_file = os.path.join(ACRONYM_PATH, 'failed.txt')
+if not os.path.isfile(failed_file):
+ with open(failed_file, 'w') as f:
+ f.write('')
+
+dne_file = os.path.join(ACRONYM_PATH, 'dne.txt')
+if not os.path.isfile(dne_file):
+ with open(dne_file, 'w') as f:
+ f.write('')
+
+# -- Get all hashes -- #
+hashes = []
+for fname in os.listdir(GRASPDIR):
+ tokens = fname.split("_")
+ assert(len(tokens) == 3)
+ hashes.append(tokens[1])
+
+print(len(hashes))
+print(len(os.listdir(os.path.join(ACRONYM_PATH, 'models'))))
+
+# -- Create output directory if it doesn't exist -- #
+out_name = 'meshes'
+if not os.path.exists(os.path.join(ACRONYM_PATH, out_name)):
+ os.makedirs(os.path.join(ACRONYM_PATH, out_name))
+
+REGENENERATE = [
+ # '49a0a84ee5a91344b11c5cce13f76151',
+ # 'feb146982d0c64dfcbf4f3f04bbad8',
+ # '202fd2497d2e85f0dd6c14adedcbd4c3',
+ # '47fc4a2417ca259f894dbff6c6a6be89',
+ # '2a7d62b731a04f5fa54b9afa882a89ed',
+ # '8123f469a08a88e7761dc3477e65a72',
+
+ # '202fd2497d2e85f0dd6c14adedcbd4c3'
+ # 'feb146982d0c64dfcbf4f3f04bbad8',
+ # '47fc4a2417ca259f894dbff6c6a6be89',
+ # 'feb146982d0c64dfcbf4f3f04bbad8',
+ # '2a7d62b731a04f5fa54b9afa882a89ed',
+ # 'b54e412afd42fe6da7c64d6a7060b75b',
+ '41efae18a5376bb4fc50236094ae9e18',
+]
+
+def write_failed(h):
+ with open(failed_file, 'a') as f:
+ f.write(f'{h}\n')
+
+def write_dne(h):
+ with open(dne_file, 'a') as f:
+ f.write(f'{h}\n')
+
+def remove_temp(temp_name):
+ if os.path.isfile(temp_name):
+ os.remove(temp_name)
+
+## Define function to process a single file
+def process_hash(h):
+ """Process a single object file by calling a subshell with the mesh processing script.
+
+ Args:
+ h (string): the hash denoting the file type
+ """
+ obj = os.path.join(ACRONYM_PATH, 'models/', h + ".obj")
+ temp_name = os.path.join(ACRONYM_PATH, f"temp.{h}.watertight.obj")
+ outfile = os.path.join(ACRONYM_PATH, "meshes/", h + ".obj")
+ outfile_search = os.path.join(ACRONYM_PATH, "meshes/", '*/', h + ".obj")
+
+ if h in REGENENERATE:
+ print(f'Regenerating: {h}')
+ else:
+ # File already done
+ # if os.path.isfile(outfile):
+ if glob.glob(outfile_search) or glob.glob(outfile):
+ # print(f'{outfile} already done')
+ # Get rid of existing temp files
+ if os.path.isfile(temp_name):
+ os.remove(temp_name)
+ print(f'removed: {temp_name}')
+ return
+
+ if not os.path.isfile(obj):
+ print(f'File does not exist: {h}')
+ write_dne(h)
+ return
+
+ # Waterproof the object
+ completed = subprocess.CompletedProcess(args=[], returncode=0)
+ if not os.path.isfile(temp_name):
+ completed = subprocess.run(["timeout", "-sKILL", "30", MANIFOLD_PATH, obj, temp_name], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+
+ watertight_conversion = completed.returncode
+ if completed.returncode != 0:
+ print(f"Skipping object (manifold failed): {h}")
+ write_failed(h)
+ remove_temp(temp_name)
+ return
+
+ # Simplify the object
+ completed = subprocess.run(["timeout", "-sKILL", "60", SIMPLIFY_PATH, "-i", temp_name, "-o", outfile, "-m", "-r", "0.02"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+
+ if completed.returncode != 0:
+ print(f"Skipping object (simplify failed): {h}")
+ write_failed(h)
+ remove_temp(temp_name)
+ return
+
+ if not os.path.exists(outfile):
+ print(f"Skipping object (outfile not created): {h}")
+ write_failed(h)
+ remove_temp(temp_name)
+ return
+
+ print(f"Finished object: {h}")
+ remove_temp(temp_name)
+
+# -- Issue the commands in a multiprocessing pool -- #
+with Pool(cpu_count()-4) as p:
+# with Pool(1) as p:
+ examples = list(
+ tqdm(
+ p.imap_unordered(process_hash, hashes),
+ total=len(hashes)
+ )
+ )
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/uniform_quaternions/data2_4608.qua b/dimos/models/manipulation/contact_graspnet_pytorch/uniform_quaternions/data2_4608.qua
new file mode 100644
index 0000000000..6a7769a04e
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/uniform_quaternions/data2_4608.qua
@@ -0,0 +1,4608 @@
+0.762127 0.0499525 0.425606 0.485311
+0.749087 0.149003 0.358619 0.536711
+0.72323 0.245503 0.285496 0.578929
+0.684998 0.337804 0.207488 0.611241
+0.635045 0.424324 0.12593 0.633094
+0.574227 0.503584 0.0422176 0.644115
+0.503584 0.574227 -0.0422175 0.644115
+0.424324 0.635045 -0.12593 0.633094
+0.337804 0.684998 -0.207488 0.611241
+0.245503 0.72323 -0.285496 0.578929
+0.149003 0.749087 -0.358619 0.536711
+0.0499524 0.762127 -0.425606 0.48531
+-0.0499525 0.762127 -0.485311 0.425606
+-0.149003 0.749087 -0.536711 0.358619
+-0.245504 0.72323 -0.578929 0.285496
+-0.337804 0.684998 -0.611241 0.207488
+-0.424324 0.635045 -0.633094 0.12593
+-0.503584 0.574227 -0.644115 0.0422174
+-0.574227 0.503584 -0.644115 -0.0422176
+-0.635045 0.424324 -0.633094 -0.12593
+-0.684998 0.337804 -0.611241 -0.207488
+-0.72323 0.245503 -0.578929 -0.285496
+-0.749087 0.149003 -0.536711 -0.358619
+-0.762127 0.0499523 -0.48531 -0.425606
+0.814748 0.0534014 0.288675 0.5
+0.800808 0.159291 0.220942 0.533402
+0.773165 0.262454 0.149429 0.557678
+0.732294 0.361127 0.0753593 0.572411
+0.678892 0.453621 -8.60604e-10 0.57735
+0.613875 0.538354 -0.0753593 0.572411
+0.538354 0.613875 -0.149429 0.557678
+0.453621 0.678892 -0.220942 0.533402
+0.361127 0.732294 -0.288675 0.5
+0.262454 0.773165 -0.351469 0.458043
+0.159291 0.800808 -0.408248 0.408248
+0.0534014 0.814748 -0.458043 0.351469
+-0.0534015 0.814748 -0.5 0.288675
+-0.159291 0.800808 -0.533402 0.220942
+-0.262454 0.773165 -0.557678 0.149429
+-0.361127 0.732293 -0.572411 0.0753592
+-0.453621 0.678892 -0.57735 -9.49229e-08
+-0.538354 0.613875 -0.572411 -0.0753594
+-0.613875 0.538354 -0.557678 -0.149429
+-0.678892 0.453621 -0.533402 -0.220942
+-0.732294 0.361127 -0.5 -0.288675
+-0.773165 0.262454 -0.458043 -0.351469
+-0.800808 0.15929 -0.408248 -0.408248
+-0.814748 0.0534013 -0.351468 -0.458043
+0.814748 0.0534014 0.458043 0.351469
+0.800808 0.159291 0.408248 0.408248
+0.773165 0.262454 0.351469 0.458043
+0.732294 0.361127 0.288675 0.5
+0.678892 0.453621 0.220942 0.533402
+0.613875 0.538354 0.149429 0.557678
+0.538354 0.613875 0.0753593 0.572411
+0.453621 0.678892 -2.43762e-08 0.57735
+0.361127 0.732294 -0.0753593 0.572411
+0.262454 0.773165 -0.149429 0.557678
+0.159291 0.800808 -0.220942 0.533402
+0.0534014 0.814748 -0.288675 0.5
+-0.0534015 0.814748 -0.351469 0.458043
+-0.159291 0.800808 -0.408248 0.408248
+-0.262454 0.773165 -0.458043 0.351469
+-0.361127 0.732293 -0.5 0.288675
+-0.453621 0.678892 -0.533402 0.220942
+-0.538354 0.613875 -0.557678 0.149429
+-0.613875 0.538354 -0.572411 0.0753593
+-0.678892 0.453621 -0.57735 -4.9613e-08
+-0.732294 0.361127 -0.572411 -0.0753594
+-0.773165 0.262454 -0.557677 -0.149429
+-0.800808 0.15929 -0.533402 -0.220943
+-0.814748 0.0534013 -0.5 -0.288675
+0.864171 0.0566408 0.329673 0.37592
+0.849385 0.168953 0.277785 0.415735
+0.820066 0.278375 0.221144 0.448436
+0.776715 0.383033 0.16072 0.473465
+0.720074 0.481138 0.0975452 0.490393
+0.651112 0.57101 0.0327016 0.498929
+0.57101 0.651112 -0.0327016 0.498929
+0.481138 0.720074 -0.0975452 0.490393
+0.383033 0.776715 -0.16072 0.473465
+0.278375 0.820066 -0.221144 0.448436
+0.168953 0.849385 -0.277785 0.415735
+0.0566407 0.864171 -0.329673 0.37592
+-0.0566408 0.864171 -0.37592 0.329673
+-0.168953 0.849385 -0.415735 0.277785
+-0.278375 0.820066 -0.448436 0.221144
+-0.383033 0.776715 -0.473465 0.16072
+-0.481138 0.720074 -0.490393 0.0975451
+-0.57101 0.651112 -0.498929 0.0327015
+-0.651112 0.57101 -0.498929 -0.0327016
+-0.720074 0.481138 -0.490393 -0.0975452
+-0.776715 0.383033 -0.473465 -0.16072
+-0.820066 0.278375 -0.448436 -0.221144
+-0.849385 0.168953 -0.415735 -0.277785
+-0.864171 0.0566406 -0.37592 -0.329673
+0.864171 0.0566408 0.16072 0.473465
+0.849385 0.168953 0.0975452 0.490393
+0.820066 0.278375 0.0327016 0.498929
+0.776715 0.383033 -0.0327016 0.498929
+0.720074 0.481138 -0.0975452 0.490393
+0.651112 0.57101 -0.16072 0.473465
+0.57101 0.651112 -0.221144 0.448436
+0.481138 0.720074 -0.277785 0.415735
+0.383033 0.776715 -0.329673 0.37592
+0.278375 0.820066 -0.37592 0.329673
+0.168953 0.849385 -0.415735 0.277785
+0.0566407 0.864171 -0.448436 0.221144
+-0.0566408 0.864171 -0.473465 0.16072
+-0.168953 0.849385 -0.490393 0.0975452
+-0.278375 0.820066 -0.498929 0.0327015
+-0.383033 0.776715 -0.498929 -0.0327017
+-0.481138 0.720074 -0.490393 -0.0975452
+-0.57101 0.651112 -0.473465 -0.16072
+-0.651112 0.57101 -0.448436 -0.221144
+-0.720074 0.481138 -0.415735 -0.277785
+-0.776715 0.383033 -0.37592 -0.329673
+-0.820066 0.278375 -0.329673 -0.37592
+-0.849385 0.168953 -0.277785 -0.415735
+-0.864171 0.0566406 -0.221144 -0.448436
+0.910916 0.0597046 0.0532871 0.404756
+0.89533 0.178092 -2.23064e-09 0.408248
+0.864425 0.293433 -0.0532871 0.404756
+0.818729 0.403753 -0.105662 0.394338
+0.759024 0.507164 -0.15623 0.377172
+0.686333 0.601898 -0.204124 0.353553
+0.601898 0.686333 -0.248526 0.323885
+0.507164 0.759024 -0.288675 0.288675
+0.403752 0.818729 -0.323885 0.248526
+0.293433 0.864425 -0.353553 0.204124
+0.178092 0.89533 -0.377172 0.15623
+0.0597046 0.910916 -0.394338 0.105662
+-0.0597046 0.910916 -0.404756 0.0532871
+-0.178092 0.89533 -0.408248 -1.82562e-09
+-0.293433 0.864425 -0.404756 -0.0532871
+-0.403753 0.818729 -0.394338 -0.105663
+-0.507164 0.759024 -0.377172 -0.15623
+-0.601898 0.686333 -0.353553 -0.204124
+-0.686333 0.601898 -0.323885 -0.248526
+-0.759024 0.507164 -0.288675 -0.288675
+-0.818729 0.403752 -0.248526 -0.323885
+-0.864425 0.293433 -0.204124 -0.353553
+-0.89533 0.178092 -0.15623 -0.377172
+-0.910916 0.0597044 -0.105662 -0.394338
+0.910916 0.0597046 0.204124 0.353553
+0.89533 0.178092 0.15623 0.377172
+0.864425 0.293433 0.105662 0.394338
+0.818729 0.403753 0.0532871 0.404756
+0.759024 0.507164 -6.08539e-10 0.408248
+0.686333 0.601898 -0.0532871 0.404756
+0.601898 0.686333 -0.105662 0.394338
+0.507164 0.759024 -0.15623 0.377172
+0.403752 0.818729 -0.204124 0.353553
+0.293433 0.864425 -0.248526 0.323885
+0.178092 0.89533 -0.288675 0.288675
+0.0597046 0.910916 -0.323885 0.248526
+-0.0597046 0.910916 -0.353553 0.204124
+-0.178092 0.89533 -0.377172 0.15623
+-0.293433 0.864425 -0.394338 0.105662
+-0.403753 0.818729 -0.404756 0.053287
+-0.507164 0.759024 -0.408248 -6.71206e-08
+-0.601898 0.686333 -0.404756 -0.0532872
+-0.686333 0.601898 -0.394338 -0.105662
+-0.759024 0.507164 -0.377172 -0.15623
+-0.818729 0.403752 -0.353553 -0.204124
+-0.864425 0.293433 -0.323885 -0.248526
+-0.89533 0.178092 -0.288675 -0.288675
+-0.910916 0.0597044 -0.248526 -0.323885
+0.949933 0.0622619 0.059734 0.300303
+0.93368 0.18572 0.0200255 0.305531
+0.901451 0.306001 -0.0200255 0.305531
+0.853797 0.421046 -0.059734 0.300303
+0.791535 0.528887 -0.0984203 0.289937
+0.71573 0.627678 -0.135423 0.27461
+0.627679 0.71573 -0.170108 0.254585
+0.528887 0.791536 -0.201883 0.230203
+0.421046 0.853797 -0.230203 0.201883
+0.306001 0.901451 -0.254585 0.170108
+0.18572 0.93368 -0.27461 0.135423
+0.0622619 0.949933 -0.289937 0.0984203
+-0.062262 0.949933 -0.300303 0.059734
+-0.18572 0.93368 -0.305531 0.0200255
+-0.306001 0.901451 -0.305531 -0.0200256
+-0.421046 0.853797 -0.300303 -0.059734
+-0.528887 0.791535 -0.289937 -0.0984204
+-0.627679 0.71573 -0.27461 -0.135423
+-0.71573 0.627678 -0.254585 -0.170108
+-0.791536 0.528887 -0.230203 -0.201883
+-0.853797 0.421046 -0.201883 -0.230203
+-0.901451 0.306001 -0.170108 -0.254585
+-0.93368 0.18572 -0.135423 -0.27461
+-0.949933 0.0622617 -0.0984203 -0.289937
+0.864171 0.0566408 0.448436 0.221144
+0.849385 0.168953 0.415735 0.277785
+0.820066 0.278375 0.37592 0.329673
+0.776715 0.383033 0.329673 0.37592
+0.720074 0.481138 0.277785 0.415735
+0.651112 0.57101 0.221144 0.448436
+0.57101 0.651112 0.16072 0.473465
+0.481138 0.720074 0.0975451 0.490393
+0.383033 0.776715 0.0327016 0.498929
+0.278375 0.820066 -0.0327016 0.498929
+0.168953 0.849385 -0.0975451 0.490393
+0.0566407 0.864171 -0.16072 0.473465
+-0.0566408 0.864171 -0.221144 0.448436
+-0.168953 0.849385 -0.277785 0.415735
+-0.278375 0.820066 -0.329673 0.37592
+-0.383033 0.776715 -0.37592 0.329673
+-0.481138 0.720074 -0.415735 0.277785
+-0.57101 0.651112 -0.448436 0.221144
+-0.651112 0.57101 -0.473465 0.16072
+-0.720074 0.481138 -0.490393 0.0975451
+-0.776715 0.383033 -0.498929 0.0327015
+-0.820066 0.278375 -0.498929 -0.0327017
+-0.849385 0.168953 -0.490393 -0.0975453
+-0.864171 0.0566406 -0.473465 -0.16072
+0.910916 0.0597046 0.323885 0.248526
+0.89533 0.178092 0.288675 0.288675
+0.864425 0.293433 0.248526 0.323885
+0.818729 0.403753 0.204124 0.353553
+0.759024 0.507164 0.15623 0.377172
+0.686333 0.601898 0.105662 0.394338
+0.601898 0.686333 0.0532871 0.404756
+0.507164 0.759024 -1.72366e-08 0.408248
+0.403752 0.818729 -0.0532871 0.404756
+0.293433 0.864425 -0.105662 0.394338
+0.178092 0.89533 -0.15623 0.377172
+0.0597046 0.910916 -0.204124 0.353553
+-0.0597046 0.910916 -0.248526 0.323885
+-0.178092 0.89533 -0.288675 0.288675
+-0.293433 0.864425 -0.323885 0.248526
+-0.403753 0.818729 -0.353553 0.204124
+-0.507164 0.759024 -0.377172 0.15623
+-0.601898 0.686333 -0.394338 0.105662
+-0.686333 0.601898 -0.404756 0.053287
+-0.759024 0.507164 -0.408248 -3.50817e-08
+-0.818729 0.403752 -0.404756 -0.0532871
+-0.864425 0.293433 -0.394338 -0.105663
+-0.89533 0.178092 -0.377172 -0.15623
+-0.910916 0.0597044 -0.353553 -0.204124
+0.910916 0.0597046 0.394338 0.105662
+0.89533 0.178092 0.377172 0.15623
+0.864425 0.293433 0.353553 0.204124
+0.818729 0.403753 0.323885 0.248526
+0.759024 0.507164 0.288675 0.288675
+0.686333 0.601898 0.248526 0.323885
+0.601898 0.686333 0.204124 0.353553
+0.507164 0.759024 0.15623 0.377172
+0.403752 0.818729 0.105662 0.394338
+0.293433 0.864425 0.0532871 0.404756
+0.178092 0.89533 1.48024e-08 0.408248
+0.0597046 0.910916 -0.0532871 0.404756
+-0.0597046 0.910916 -0.105662 0.394338
+-0.178092 0.89533 -0.15623 0.377172
+-0.293433 0.864425 -0.204124 0.353553
+-0.403753 0.818729 -0.248526 0.323885
+-0.507164 0.759024 -0.288675 0.288675
+-0.601898 0.686333 -0.323885 0.248526
+-0.686333 0.601898 -0.353553 0.204124
+-0.759024 0.507164 -0.377172 0.15623
+-0.818729 0.403752 -0.394338 0.105662
+-0.864425 0.293433 -0.404756 0.053287
+-0.89533 0.178092 -0.408248 -1.00377e-07
+-0.910916 0.0597044 -0.404756 -0.0532872
+0.949933 0.0622619 0.289937 0.0984203
+0.93368 0.18572 0.27461 0.135423
+0.901451 0.306001 0.254585 0.170108
+0.853797 0.421046 0.230203 0.201883
+0.791535 0.528887 0.201883 0.230203
+0.71573 0.627678 0.170108 0.254585
+0.627679 0.71573 0.135423 0.27461
+0.528887 0.791536 0.0984203 0.289937
+0.421046 0.853797 0.059734 0.300303
+0.306001 0.901451 0.0200255 0.305531
+0.18572 0.93368 -0.0200255 0.305531
+0.0622619 0.949933 -0.059734 0.300303
+-0.062262 0.949933 -0.0984203 0.289937
+-0.18572 0.93368 -0.135423 0.27461
+-0.306001 0.901451 -0.170108 0.254585
+-0.421046 0.853797 -0.201883 0.230203
+-0.528887 0.791535 -0.230203 0.201883
+-0.627679 0.71573 -0.254585 0.170108
+-0.71573 0.627678 -0.27461 0.135423
+-0.791536 0.528887 -0.289937 0.0984203
+-0.853797 0.421046 -0.300303 0.0597339
+-0.901451 0.306001 -0.305531 0.0200255
+-0.93368 0.18572 -0.305531 -0.0200256
+-0.949933 0.0622617 -0.300303 -0.059734
+0.949933 0.0622619 0.201883 0.230203
+0.93368 0.18572 0.170108 0.254585
+0.901451 0.306001 0.135423 0.27461
+0.853797 0.421046 0.0984203 0.289937
+0.791535 0.528887 0.059734 0.300303
+0.71573 0.627678 0.0200255 0.305531
+0.627679 0.71573 -0.0200255 0.305531
+0.528887 0.791536 -0.059734 0.300303
+0.421046 0.853797 -0.0984203 0.289937
+0.306001 0.901451 -0.135423 0.27461
+0.18572 0.93368 -0.170108 0.254585
+0.0622619 0.949933 -0.201883 0.230203
+-0.062262 0.949933 -0.230203 0.201883
+-0.18572 0.93368 -0.254585 0.170108
+-0.306001 0.901451 -0.27461 0.135423
+-0.421046 0.853797 -0.289937 0.0984203
+-0.528887 0.791535 -0.300303 0.0597339
+-0.627679 0.71573 -0.305531 0.0200255
+-0.71573 0.627678 -0.305531 -0.0200256
+-0.791536 0.528887 -0.300303 -0.059734
+-0.853797 0.421046 -0.289937 -0.0984204
+-0.901451 0.306001 -0.27461 -0.135423
+-0.93368 0.18572 -0.254584 -0.170108
+-0.949933 0.0622617 -0.230203 -0.201883
+0.976849 0.0640261 0.0656136 0.193291
+0.960135 0.190983 0.0398226 0.200202
+0.926993 0.314672 0.0133504 0.203687
+0.877989 0.432976 -0.0133504 0.203687
+0.813963 0.543873 -0.0398226 0.200202
+0.73601 0.645463 -0.0656136 0.193291
+0.645463 0.73601 -0.0902818 0.183073
+0.543873 0.813963 -0.113405 0.169723
+0.432976 0.877989 -0.134588 0.153469
+0.314672 0.926993 -0.153469 0.134588
+0.190983 0.960135 -0.169723 0.113405
+0.064026 0.976849 -0.183073 0.0902818
+-0.0640261 0.976849 -0.193291 0.0656136
+-0.190983 0.960135 -0.200202 0.0398226
+-0.314672 0.926992 -0.203687 0.0133503
+-0.432976 0.877989 -0.203687 -0.0133504
+-0.543873 0.813963 -0.200202 -0.0398227
+-0.645463 0.73601 -0.193291 -0.0656136
+-0.73601 0.645463 -0.183073 -0.0902818
+-0.813963 0.543873 -0.169723 -0.113405
+-0.877989 0.432976 -0.153469 -0.134588
+-0.926993 0.314671 -0.134588 -0.153469
+-0.960135 0.190982 -0.113405 -0.169723
+-0.976849 0.0640259 -0.0902818 -0.183073
+0.976849 0.0640261 0.183073 0.0902818
+0.960135 0.190983 0.169723 0.113405
+0.926993 0.314672 0.153469 0.134588
+0.877989 0.432976 0.134588 0.153469
+0.813963 0.543873 0.113405 0.169723
+0.73601 0.645463 0.0902818 0.183073
+0.645463 0.73601 0.0656136 0.193291
+0.543873 0.813963 0.0398226 0.200202
+0.432976 0.877989 0.0133504 0.203687
+0.314672 0.926993 -0.0133504 0.203687
+0.190983 0.960135 -0.0398226 0.200202
+0.064026 0.976849 -0.0656136 0.193291
+-0.0640261 0.976849 -0.0902818 0.183073
+-0.190983 0.960135 -0.113405 0.169723
+-0.314672 0.926992 -0.134588 0.153469
+-0.432976 0.877989 -0.153469 0.134588
+-0.543873 0.813963 -0.169723 0.113405
+-0.645463 0.73601 -0.183073 0.0902818
+-0.73601 0.645463 -0.193291 0.0656135
+-0.813963 0.543873 -0.200202 0.0398226
+-0.877989 0.432976 -0.203687 0.0133503
+-0.926993 0.314671 -0.203687 -0.0133504
+-0.960135 0.190982 -0.200202 -0.0398227
+-0.976849 0.0640259 -0.193291 -0.0656136
+0.992648 0.0650616 0.0672942 0.0767343
+0.975664 0.194072 0.0567026 0.0848615
+0.941985 0.319761 0.0451409 0.0915367
+0.892189 0.439979 0.0328068 0.0966457
+0.827128 0.552669 0.0199113 0.100101
+0.747914 0.655903 0.00667518 0.101844
+0.655903 0.747914 -0.00667518 0.101844
+0.552669 0.827128 -0.0199113 0.100101
+0.439979 0.892189 -0.0328068 0.0966457
+0.319761 0.941985 -0.0451409 0.0915367
+0.194072 0.975664 -0.0567026 0.0848615
+0.0650615 0.992648 -0.0672942 0.0767343
+-0.0650616 0.992648 -0.0767343 0.0672942
+-0.194072 0.975664 -0.0848615 0.0567026
+-0.319761 0.941985 -0.0915367 0.0451409
+-0.439979 0.892189 -0.0966457 0.0328068
+-0.552669 0.827128 -0.100101 0.0199113
+-0.655903 0.747914 -0.101844 0.00667516
+-0.747914 0.655903 -0.101844 -0.00667519
+-0.827128 0.552669 -0.100101 -0.0199113
+-0.892189 0.439979 -0.0966456 -0.0328068
+-0.941985 0.319761 -0.0915367 -0.0451409
+-0.975664 0.194071 -0.0848615 -0.0567027
+-0.992648 0.0650614 -0.0767343 -0.0672942
+0.762127 0.0499525 -0.485311 0.425606
+0.749087 0.149003 -0.536711 0.358619
+0.72323 0.245503 -0.578929 0.285496
+0.684998 0.337804 -0.611241 0.207488
+0.635045 0.424324 -0.633094 0.12593
+0.574227 0.503584 -0.644115 0.0422176
+0.503584 0.574227 -0.644115 -0.0422175
+0.424324 0.635045 -0.633094 -0.12593
+0.337804 0.684998 -0.611241 -0.207488
+0.245503 0.72323 -0.578929 -0.285496
+0.149003 0.749087 -0.536711 -0.358619
+0.0499524 0.762127 -0.48531 -0.425606
+-0.0499525 0.762127 -0.425606 -0.485311
+-0.149003 0.749087 -0.358619 -0.536711
+-0.245504 0.72323 -0.285496 -0.578929
+-0.337804 0.684998 -0.207488 -0.611241
+-0.424324 0.635045 -0.12593 -0.633094
+-0.503584 0.574227 -0.0422174 -0.644115
+-0.574227 0.503584 0.0422176 -0.644115
+-0.635045 0.424324 0.12593 -0.633094
+-0.684998 0.337804 0.207488 -0.611241
+-0.72323 0.245503 0.285496 -0.578929
+-0.749087 0.149003 0.358619 -0.536711
+-0.762127 0.0499523 0.425606 -0.48531
+0.814748 0.0534014 -0.5 0.288675
+0.800808 0.159291 -0.533402 0.220942
+0.773165 0.262454 -0.557678 0.149429
+0.732294 0.361127 -0.572411 0.0753593
+0.678892 0.453621 -0.57735 -8.60604e-10
+0.613875 0.538354 -0.572411 -0.0753593
+0.538354 0.613875 -0.557678 -0.149429
+0.453621 0.678892 -0.533402 -0.220942
+0.361127 0.732294 -0.5 -0.288675
+0.262454 0.773165 -0.458043 -0.351469
+0.159291 0.800808 -0.408248 -0.408248
+0.0534014 0.814748 -0.351469 -0.458043
+-0.0534015 0.814748 -0.288675 -0.5
+-0.159291 0.800808 -0.220942 -0.533402
+-0.262454 0.773165 -0.149429 -0.557678
+-0.361127 0.732293 -0.0753592 -0.572411
+-0.453621 0.678892 9.49229e-08 -0.57735
+-0.538354 0.613875 0.0753594 -0.572411
+-0.613875 0.538354 0.149429 -0.557678
+-0.678892 0.453621 0.220942 -0.533402
+-0.732294 0.361127 0.288675 -0.5
+-0.773165 0.262454 0.351469 -0.458043
+-0.800808 0.15929 0.408248 -0.408248
+-0.814748 0.0534013 0.458043 -0.351468
+0.814748 0.0534014 -0.351469 0.458043
+0.800808 0.159291 -0.408248 0.408248
+0.773165 0.262454 -0.458043 0.351469
+0.732294 0.361127 -0.5 0.288675
+0.678892 0.453621 -0.533402 0.220942
+0.613875 0.538354 -0.557678 0.149429
+0.538354 0.613875 -0.572411 0.0753593
+0.453621 0.678892 -0.57735 -2.43762e-08
+0.361127 0.732294 -0.572411 -0.0753593
+0.262454 0.773165 -0.557678 -0.149429
+0.159291 0.800808 -0.533402 -0.220942
+0.0534014 0.814748 -0.5 -0.288675
+-0.0534015 0.814748 -0.458043 -0.351469
+-0.159291 0.800808 -0.408248 -0.408248
+-0.262454 0.773165 -0.351469 -0.458043
+-0.361127 0.732293 -0.288675 -0.5
+-0.453621 0.678892 -0.220942 -0.533402
+-0.538354 0.613875 -0.149429 -0.557678
+-0.613875 0.538354 -0.0753593 -0.572411
+-0.678892 0.453621 4.9613e-08 -0.57735
+-0.732294 0.361127 0.0753594 -0.572411
+-0.773165 0.262454 0.149429 -0.557677
+-0.800808 0.15929 0.220943 -0.533402
+-0.814748 0.0534013 0.288675 -0.5
+0.864171 0.0566408 -0.37592 0.329673
+0.849385 0.168953 -0.415735 0.277785
+0.820066 0.278375 -0.448436 0.221144
+0.776715 0.383033 -0.473465 0.16072
+0.720074 0.481138 -0.490393 0.0975452
+0.651112 0.57101 -0.498929 0.0327016
+0.57101 0.651112 -0.498929 -0.0327016
+0.481138 0.720074 -0.490393 -0.0975452
+0.383033 0.776715 -0.473465 -0.16072
+0.278375 0.820066 -0.448436 -0.221144
+0.168953 0.849385 -0.415735 -0.277785
+0.0566407 0.864171 -0.37592 -0.329673
+-0.0566408 0.864171 -0.329673 -0.37592
+-0.168953 0.849385 -0.277785 -0.415735
+-0.278375 0.820066 -0.221144 -0.448436
+-0.383033 0.776715 -0.16072 -0.473465
+-0.481138 0.720074 -0.0975451 -0.490393
+-0.57101 0.651112 -0.0327015 -0.498929
+-0.651112 0.57101 0.0327016 -0.498929
+-0.720074 0.481138 0.0975452 -0.490393
+-0.776715 0.383033 0.16072 -0.473465
+-0.820066 0.278375 0.221144 -0.448436
+-0.849385 0.168953 0.277785 -0.415735
+-0.864171 0.0566406 0.329673 -0.37592
+0.864171 0.0566408 -0.473465 0.16072
+0.849385 0.168953 -0.490393 0.0975452
+0.820066 0.278375 -0.498929 0.0327016
+0.776715 0.383033 -0.498929 -0.0327016
+0.720074 0.481138 -0.490393 -0.0975452
+0.651112 0.57101 -0.473465 -0.16072
+0.57101 0.651112 -0.448436 -0.221144
+0.481138 0.720074 -0.415735 -0.277785
+0.383033 0.776715 -0.37592 -0.329673
+0.278375 0.820066 -0.329673 -0.37592
+0.168953 0.849385 -0.277785 -0.415735
+0.0566407 0.864171 -0.221144 -0.448436
+-0.0566408 0.864171 -0.16072 -0.473465
+-0.168953 0.849385 -0.0975452 -0.490393
+-0.278375 0.820066 -0.0327015 -0.498929
+-0.383033 0.776715 0.0327017 -0.498929
+-0.481138 0.720074 0.0975452 -0.490393
+-0.57101 0.651112 0.16072 -0.473465
+-0.651112 0.57101 0.221144 -0.448436
+-0.720074 0.481138 0.277785 -0.415735
+-0.776715 0.383033 0.329673 -0.37592
+-0.820066 0.278375 0.37592 -0.329673
+-0.849385 0.168953 0.415735 -0.277785
+-0.864171 0.0566406 0.448436 -0.221144
+0.910916 0.0597046 -0.404756 0.0532871
+0.89533 0.178092 -0.408248 -2.23064e-09
+0.864425 0.293433 -0.404756 -0.0532871
+0.818729 0.403753 -0.394338 -0.105662
+0.759024 0.507164 -0.377172 -0.15623
+0.686333 0.601898 -0.353553 -0.204124
+0.601898 0.686333 -0.323885 -0.248526
+0.507164 0.759024 -0.288675 -0.288675
+0.403752 0.818729 -0.248526 -0.323885
+0.293433 0.864425 -0.204124 -0.353553
+0.178092 0.89533 -0.15623 -0.377172
+0.0597046 0.910916 -0.105662 -0.394338
+-0.0597046 0.910916 -0.0532871 -0.404756
+-0.178092 0.89533 1.82562e-09 -0.408248
+-0.293433 0.864425 0.0532871 -0.404756
+-0.403753 0.818729 0.105663 -0.394338
+-0.507164 0.759024 0.15623 -0.377172
+-0.601898 0.686333 0.204124 -0.353553
+-0.686333 0.601898 0.248526 -0.323885
+-0.759024 0.507164 0.288675 -0.288675
+-0.818729 0.403752 0.323885 -0.248526
+-0.864425 0.293433 0.353553 -0.204124
+-0.89533 0.178092 0.377172 -0.15623
+-0.910916 0.0597044 0.394338 -0.105662
+0.910916 0.0597046 -0.353553 0.204124
+0.89533 0.178092 -0.377172 0.15623
+0.864425 0.293433 -0.394338 0.105662
+0.818729 0.403753 -0.404756 0.0532871
+0.759024 0.507164 -0.408248 -6.08539e-10
+0.686333 0.601898 -0.404756 -0.0532871
+0.601898 0.686333 -0.394338 -0.105662
+0.507164 0.759024 -0.377172 -0.15623
+0.403752 0.818729 -0.353553 -0.204124
+0.293433 0.864425 -0.323885 -0.248526
+0.178092 0.89533 -0.288675 -0.288675
+0.0597046 0.910916 -0.248526 -0.323885
+-0.0597046 0.910916 -0.204124 -0.353553
+-0.178092 0.89533 -0.15623 -0.377172
+-0.293433 0.864425 -0.105662 -0.394338
+-0.403753 0.818729 -0.053287 -0.404756
+-0.507164 0.759024 6.71206e-08 -0.408248
+-0.601898 0.686333 0.0532872 -0.404756
+-0.686333 0.601898 0.105662 -0.394338
+-0.759024 0.507164 0.15623 -0.377172
+-0.818729 0.403752 0.204124 -0.353553
+-0.864425 0.293433 0.248526 -0.323885
+-0.89533 0.178092 0.288675 -0.288675
+-0.910916 0.0597044 0.323885 -0.248526
+0.949933 0.0622619 -0.300303 0.059734
+0.93368 0.18572 -0.305531 0.0200255
+0.901451 0.306001 -0.305531 -0.0200255
+0.853797 0.421046 -0.300303 -0.059734
+0.791535 0.528887 -0.289937 -0.0984203
+0.71573 0.627678 -0.27461 -0.135423
+0.627679 0.71573 -0.254585 -0.170108
+0.528887 0.791536 -0.230203 -0.201883
+0.421046 0.853797 -0.201883 -0.230203
+0.306001 0.901451 -0.170108 -0.254585
+0.18572 0.93368 -0.135423 -0.27461
+0.0622619 0.949933 -0.0984203 -0.289937
+-0.062262 0.949933 -0.059734 -0.300303
+-0.18572 0.93368 -0.0200255 -0.305531
+-0.306001 0.901451 0.0200256 -0.305531
+-0.421046 0.853797 0.059734 -0.300303
+-0.528887 0.791535 0.0984204 -0.289937
+-0.627679 0.71573 0.135423 -0.27461
+-0.71573 0.627678 0.170108 -0.254585
+-0.791536 0.528887 0.201883 -0.230203
+-0.853797 0.421046 0.230203 -0.201883
+-0.901451 0.306001 0.254585 -0.170108
+-0.93368 0.18572 0.27461 -0.135423
+-0.949933 0.0622617 0.289937 -0.0984203
+0.864171 0.0566408 -0.221144 0.448436
+0.849385 0.168953 -0.277785 0.415735
+0.820066 0.278375 -0.329673 0.37592
+0.776715 0.383033 -0.37592 0.329673
+0.720074 0.481138 -0.415735 0.277785
+0.651112 0.57101 -0.448436 0.221144
+0.57101 0.651112 -0.473465 0.16072
+0.481138 0.720074 -0.490393 0.0975451
+0.383033 0.776715 -0.498929 0.0327016
+0.278375 0.820066 -0.498929 -0.0327016
+0.168953 0.849385 -0.490393 -0.0975451
+0.0566407 0.864171 -0.473465 -0.16072
+-0.0566408 0.864171 -0.448436 -0.221144
+-0.168953 0.849385 -0.415735 -0.277785
+-0.278375 0.820066 -0.37592 -0.329673
+-0.383033 0.776715 -0.329673 -0.37592
+-0.481138 0.720074 -0.277785 -0.415735
+-0.57101 0.651112 -0.221144 -0.448436
+-0.651112 0.57101 -0.16072 -0.473465
+-0.720074 0.481138 -0.0975451 -0.490393
+-0.776715 0.383033 -0.0327015 -0.498929
+-0.820066 0.278375 0.0327017 -0.498929
+-0.849385 0.168953 0.0975453 -0.490393
+-0.864171 0.0566406 0.16072 -0.473465
+0.910916 0.0597046 -0.248526 0.323885
+0.89533 0.178092 -0.288675 0.288675
+0.864425 0.293433 -0.323885 0.248526
+0.818729 0.403753 -0.353553 0.204124
+0.759024 0.507164 -0.377172 0.15623
+0.686333 0.601898 -0.394338 0.105662
+0.601898 0.686333 -0.404756 0.0532871
+0.507164 0.759024 -0.408248 -1.72366e-08
+0.403752 0.818729 -0.404756 -0.0532871
+0.293433 0.864425 -0.394338 -0.105662
+0.178092 0.89533 -0.377172 -0.15623
+0.0597046 0.910916 -0.353553 -0.204124
+-0.0597046 0.910916 -0.323885 -0.248526
+-0.178092 0.89533 -0.288675 -0.288675
+-0.293433 0.864425 -0.248526 -0.323885
+-0.403753 0.818729 -0.204124 -0.353553
+-0.507164 0.759024 -0.15623 -0.377172
+-0.601898 0.686333 -0.105662 -0.394338
+-0.686333 0.601898 -0.053287 -0.404756
+-0.759024 0.507164 3.50817e-08 -0.408248
+-0.818729 0.403752 0.0532871 -0.404756
+-0.864425 0.293433 0.105663 -0.394338
+-0.89533 0.178092 0.15623 -0.377172
+-0.910916 0.0597044 0.204124 -0.353553
+0.910916 0.0597046 -0.105662 0.394338
+0.89533 0.178092 -0.15623 0.377172
+0.864425 0.293433 -0.204124 0.353553
+0.818729 0.403753 -0.248526 0.323885
+0.759024 0.507164 -0.288675 0.288675
+0.686333 0.601898 -0.323885 0.248526
+0.601898 0.686333 -0.353553 0.204124
+0.507164 0.759024 -0.377172 0.15623
+0.403752 0.818729 -0.394338 0.105662
+0.293433 0.864425 -0.404756 0.0532871
+0.178092 0.89533 -0.408248 1.48024e-08
+0.0597046 0.910916 -0.404756 -0.0532871
+-0.0597046 0.910916 -0.394338 -0.105662
+-0.178092 0.89533 -0.377172 -0.15623
+-0.293433 0.864425 -0.353553 -0.204124
+-0.403753 0.818729 -0.323885 -0.248526
+-0.507164 0.759024 -0.288675 -0.288675
+-0.601898 0.686333 -0.248526 -0.323885
+-0.686333 0.601898 -0.204124 -0.353553
+-0.759024 0.507164 -0.15623 -0.377172
+-0.818729 0.403752 -0.105662 -0.394338
+-0.864425 0.293433 -0.053287 -0.404756
+-0.89533 0.178092 1.00377e-07 -0.408248
+-0.910916 0.0597044 0.0532872 -0.404756
+0.949933 0.0622619 -0.0984203 0.289937
+0.93368 0.18572 -0.135423 0.27461
+0.901451 0.306001 -0.170108 0.254585
+0.853797 0.421046 -0.201883 0.230203
+0.791535 0.528887 -0.230203 0.201883
+0.71573 0.627678 -0.254585 0.170108
+0.627679 0.71573 -0.27461 0.135423
+0.528887 0.791536 -0.289937 0.0984203
+0.421046 0.853797 -0.300303 0.059734
+0.306001 0.901451 -0.305531 0.0200255
+0.18572 0.93368 -0.305531 -0.0200255
+0.0622619 0.949933 -0.300303 -0.059734
+-0.062262 0.949933 -0.289937 -0.0984203
+-0.18572 0.93368 -0.27461 -0.135423
+-0.306001 0.901451 -0.254585 -0.170108
+-0.421046 0.853797 -0.230203 -0.201883
+-0.528887 0.791535 -0.201883 -0.230203
+-0.627679 0.71573 -0.170108 -0.254585
+-0.71573 0.627678 -0.135423 -0.27461
+-0.791536 0.528887 -0.0984203 -0.289937
+-0.853797 0.421046 -0.0597339 -0.300303
+-0.901451 0.306001 -0.0200255 -0.305531
+-0.93368 0.18572 0.0200256 -0.305531
+-0.949933 0.0622617 0.059734 -0.300303
+0.949933 0.0622619 -0.230203 0.201883
+0.93368 0.18572 -0.254585 0.170108
+0.901451 0.306001 -0.27461 0.135423
+0.853797 0.421046 -0.289937 0.0984203
+0.791535 0.528887 -0.300303 0.059734
+0.71573 0.627678 -0.305531 0.0200255
+0.627679 0.71573 -0.305531 -0.0200255
+0.528887 0.791536 -0.300303 -0.059734
+0.421046 0.853797 -0.289937 -0.0984203
+0.306001 0.901451 -0.27461 -0.135423
+0.18572 0.93368 -0.254585 -0.170108
+0.0622619 0.949933 -0.230203 -0.201883
+-0.062262 0.949933 -0.201883 -0.230203
+-0.18572 0.93368 -0.170108 -0.254585
+-0.306001 0.901451 -0.135423 -0.27461
+-0.421046 0.853797 -0.0984203 -0.289937
+-0.528887 0.791535 -0.0597339 -0.300303
+-0.627679 0.71573 -0.0200255 -0.305531
+-0.71573 0.627678 0.0200256 -0.305531
+-0.791536 0.528887 0.059734 -0.300303
+-0.853797 0.421046 0.0984204 -0.289937
+-0.901451 0.306001 0.135423 -0.27461
+-0.93368 0.18572 0.170108 -0.254584
+-0.949933 0.0622617 0.201883 -0.230203
+0.976849 0.0640261 -0.193291 0.0656136
+0.960135 0.190983 -0.200202 0.0398226
+0.926993 0.314672 -0.203687 0.0133504
+0.877989 0.432976 -0.203687 -0.0133504
+0.813963 0.543873 -0.200202 -0.0398226
+0.73601 0.645463 -0.193291 -0.0656136
+0.645463 0.73601 -0.183073 -0.0902818
+0.543873 0.813963 -0.169723 -0.113405
+0.432976 0.877989 -0.153469 -0.134588
+0.314672 0.926993 -0.134588 -0.153469
+0.190983 0.960135 -0.113405 -0.169723
+0.064026 0.976849 -0.0902818 -0.183073
+-0.0640261 0.976849 -0.0656136 -0.193291
+-0.190983 0.960135 -0.0398226 -0.200202
+-0.314672 0.926992 -0.0133503 -0.203687
+-0.432976 0.877989 0.0133504 -0.203687
+-0.543873 0.813963 0.0398227 -0.200202
+-0.645463 0.73601 0.0656136 -0.193291
+-0.73601 0.645463 0.0902818 -0.183073
+-0.813963 0.543873 0.113405 -0.169723
+-0.877989 0.432976 0.134588 -0.153469
+-0.926993 0.314671 0.153469 -0.134588
+-0.960135 0.190982 0.169723 -0.113405
+-0.976849 0.0640259 0.183073 -0.0902818
+0.976849 0.0640261 -0.0902818 0.183073
+0.960135 0.190983 -0.113405 0.169723
+0.926993 0.314672 -0.134588 0.153469
+0.877989 0.432976 -0.153469 0.134588
+0.813963 0.543873 -0.169723 0.113405
+0.73601 0.645463 -0.183073 0.0902818
+0.645463 0.73601 -0.193291 0.0656136
+0.543873 0.813963 -0.200202 0.0398226
+0.432976 0.877989 -0.203687 0.0133504
+0.314672 0.926993 -0.203687 -0.0133504
+0.190983 0.960135 -0.200202 -0.0398226
+0.064026 0.976849 -0.193291 -0.0656136
+-0.0640261 0.976849 -0.183073 -0.0902818
+-0.190983 0.960135 -0.169723 -0.113405
+-0.314672 0.926992 -0.153469 -0.134588
+-0.432976 0.877989 -0.134588 -0.153469
+-0.543873 0.813963 -0.113405 -0.169723
+-0.645463 0.73601 -0.0902818 -0.183073
+-0.73601 0.645463 -0.0656135 -0.193291
+-0.813963 0.543873 -0.0398226 -0.200202
+-0.877989 0.432976 -0.0133503 -0.203687
+-0.926993 0.314671 0.0133504 -0.203687
+-0.960135 0.190982 0.0398227 -0.200202
+-0.976849 0.0640259 0.0656136 -0.193291
+0.992648 0.0650616 -0.0767343 0.0672942
+0.975664 0.194072 -0.0848615 0.0567026
+0.941985 0.319761 -0.0915367 0.0451409
+0.892189 0.439979 -0.0966457 0.0328068
+0.827128 0.552669 -0.100101 0.0199113
+0.747914 0.655903 -0.101844 0.00667518
+0.655903 0.747914 -0.101844 -0.00667518
+0.552669 0.827128 -0.100101 -0.0199113
+0.439979 0.892189 -0.0966457 -0.0328068
+0.319761 0.941985 -0.0915367 -0.0451409
+0.194072 0.975664 -0.0848615 -0.0567026
+0.0650615 0.992648 -0.0767343 -0.0672942
+-0.0650616 0.992648 -0.0672942 -0.0767343
+-0.194072 0.975664 -0.0567026 -0.0848615
+-0.319761 0.941985 -0.0451409 -0.0915367
+-0.439979 0.892189 -0.0328068 -0.0966457
+-0.552669 0.827128 -0.0199113 -0.100101
+-0.655903 0.747914 -0.00667516 -0.101844
+-0.747914 0.655903 0.00667519 -0.101844
+-0.827128 0.552669 0.0199113 -0.100101
+-0.892189 0.439979 0.0328068 -0.0966456
+-0.941985 0.319761 0.0451409 -0.0915367
+-0.975664 0.194071 0.0567027 -0.0848615
+-0.992648 0.0650614 0.0672942 -0.0767343
+0.762127 0.0499525 -0.425606 -0.485311
+0.749087 0.149003 -0.358619 -0.536711
+0.72323 0.245503 -0.285496 -0.578929
+0.684998 0.337804 -0.207488 -0.611241
+0.635045 0.424324 -0.12593 -0.633094
+0.574227 0.503584 -0.0422176 -0.644115
+0.503584 0.574227 0.0422175 -0.644115
+0.424324 0.635045 0.12593 -0.633094
+0.337804 0.684998 0.207488 -0.611241
+0.245503 0.72323 0.285496 -0.578929
+0.149003 0.749087 0.358619 -0.536711
+0.0499524 0.762127 0.425606 -0.48531
+-0.0499525 0.762127 0.485311 -0.425606
+-0.149003 0.749087 0.536711 -0.358619
+-0.245504 0.72323 0.578929 -0.285496
+-0.337804 0.684998 0.611241 -0.207488
+-0.424324 0.635045 0.633094 -0.12593
+-0.503584 0.574227 0.644115 -0.0422174
+-0.574227 0.503584 0.644115 0.0422176
+-0.635045 0.424324 0.633094 0.12593
+-0.684998 0.337804 0.611241 0.207488
+-0.72323 0.245503 0.578929 0.285496
+-0.749087 0.149003 0.536711 0.358619
+-0.762127 0.0499523 0.48531 0.425606
+0.814748 0.0534014 -0.288675 -0.5
+0.800808 0.159291 -0.220942 -0.533402
+0.773165 0.262454 -0.149429 -0.557678
+0.732294 0.361127 -0.0753593 -0.572411
+0.678892 0.453621 8.60604e-10 -0.57735
+0.613875 0.538354 0.0753593 -0.572411
+0.538354 0.613875 0.149429 -0.557678
+0.453621 0.678892 0.220942 -0.533402
+0.361127 0.732294 0.288675 -0.5
+0.262454 0.773165 0.351469 -0.458043
+0.159291 0.800808 0.408248 -0.408248
+0.0534014 0.814748 0.458043 -0.351469
+-0.0534015 0.814748 0.5 -0.288675
+-0.159291 0.800808 0.533402 -0.220942
+-0.262454 0.773165 0.557678 -0.149429
+-0.361127 0.732293 0.572411 -0.0753592
+-0.453621 0.678892 0.57735 9.49229e-08
+-0.538354 0.613875 0.572411 0.0753594
+-0.613875 0.538354 0.557678 0.149429
+-0.678892 0.453621 0.533402 0.220942
+-0.732294 0.361127 0.5 0.288675
+-0.773165 0.262454 0.458043 0.351469
+-0.800808 0.15929 0.408248 0.408248
+-0.814748 0.0534013 0.351468 0.458043
+0.814748 0.0534014 -0.458043 -0.351469
+0.800808 0.159291 -0.408248 -0.408248
+0.773165 0.262454 -0.351469 -0.458043
+0.732294 0.361127 -0.288675 -0.5
+0.678892 0.453621 -0.220942 -0.533402
+0.613875 0.538354 -0.149429 -0.557678
+0.538354 0.613875 -0.0753593 -0.572411
+0.453621 0.678892 2.43762e-08 -0.57735
+0.361127 0.732294 0.0753593 -0.572411
+0.262454 0.773165 0.149429 -0.557678
+0.159291 0.800808 0.220942 -0.533402
+0.0534014 0.814748 0.288675 -0.5
+-0.0534015 0.814748 0.351469 -0.458043
+-0.159291 0.800808 0.408248 -0.408248
+-0.262454 0.773165 0.458043 -0.351469
+-0.361127 0.732293 0.5 -0.288675
+-0.453621 0.678892 0.533402 -0.220942
+-0.538354 0.613875 0.557678 -0.149429
+-0.613875 0.538354 0.572411 -0.0753593
+-0.678892 0.453621 0.57735 4.9613e-08
+-0.732294 0.361127 0.572411 0.0753594
+-0.773165 0.262454 0.557677 0.149429
+-0.800808 0.15929 0.533402 0.220943
+-0.814748 0.0534013 0.5 0.288675
+0.864171 0.0566408 -0.329673 -0.37592
+0.849385 0.168953 -0.277785 -0.415735
+0.820066 0.278375 -0.221144 -0.448436
+0.776715 0.383033 -0.16072 -0.473465
+0.720074 0.481138 -0.0975452 -0.490393
+0.651112 0.57101 -0.0327016 -0.498929
+0.57101 0.651112 0.0327016 -0.498929
+0.481138 0.720074 0.0975452 -0.490393
+0.383033 0.776715 0.16072 -0.473465
+0.278375 0.820066 0.221144 -0.448436
+0.168953 0.849385 0.277785 -0.415735
+0.0566407 0.864171 0.329673 -0.37592
+-0.0566408 0.864171 0.37592 -0.329673
+-0.168953 0.849385 0.415735 -0.277785
+-0.278375 0.820066 0.448436 -0.221144
+-0.383033 0.776715 0.473465 -0.16072
+-0.481138 0.720074 0.490393 -0.0975451
+-0.57101 0.651112 0.498929 -0.0327015
+-0.651112 0.57101 0.498929 0.0327016
+-0.720074 0.481138 0.490393 0.0975452
+-0.776715 0.383033 0.473465 0.16072
+-0.820066 0.278375 0.448436 0.221144
+-0.849385 0.168953 0.415735 0.277785
+-0.864171 0.0566406 0.37592 0.329673
+0.864171 0.0566408 -0.16072 -0.473465
+0.849385 0.168953 -0.0975452 -0.490393
+0.820066 0.278375 -0.0327016 -0.498929
+0.776715 0.383033 0.0327016 -0.498929
+0.720074 0.481138 0.0975452 -0.490393
+0.651112 0.57101 0.16072 -0.473465
+0.57101 0.651112 0.221144 -0.448436
+0.481138 0.720074 0.277785 -0.415735
+0.383033 0.776715 0.329673 -0.37592
+0.278375 0.820066 0.37592 -0.329673
+0.168953 0.849385 0.415735 -0.277785
+0.0566407 0.864171 0.448436 -0.221144
+-0.0566408 0.864171 0.473465 -0.16072
+-0.168953 0.849385 0.490393 -0.0975452
+-0.278375 0.820066 0.498929 -0.0327015
+-0.383033 0.776715 0.498929 0.0327017
+-0.481138 0.720074 0.490393 0.0975452
+-0.57101 0.651112 0.473465 0.16072
+-0.651112 0.57101 0.448436 0.221144
+-0.720074 0.481138 0.415735 0.277785
+-0.776715 0.383033 0.37592 0.329673
+-0.820066 0.278375 0.329673 0.37592
+-0.849385 0.168953 0.277785 0.415735
+-0.864171 0.0566406 0.221144 0.448436
+0.910916 0.0597046 -0.0532871 -0.404756
+0.89533 0.178092 2.23064e-09 -0.408248
+0.864425 0.293433 0.0532871 -0.404756
+0.818729 0.403753 0.105662 -0.394338
+0.759024 0.507164 0.15623 -0.377172
+0.686333 0.601898 0.204124 -0.353553
+0.601898 0.686333 0.248526 -0.323885
+0.507164 0.759024 0.288675 -0.288675
+0.403752 0.818729 0.323885 -0.248526
+0.293433 0.864425 0.353553 -0.204124
+0.178092 0.89533 0.377172 -0.15623
+0.0597046 0.910916 0.394338 -0.105662
+-0.0597046 0.910916 0.404756 -0.0532871
+-0.178092 0.89533 0.408248 1.82562e-09
+-0.293433 0.864425 0.404756 0.0532871
+-0.403753 0.818729 0.394338 0.105663
+-0.507164 0.759024 0.377172 0.15623
+-0.601898 0.686333 0.353553 0.204124
+-0.686333 0.601898 0.323885 0.248526
+-0.759024 0.507164 0.288675 0.288675
+-0.818729 0.403752 0.248526 0.323885
+-0.864425 0.293433 0.204124 0.353553
+-0.89533 0.178092 0.15623 0.377172
+-0.910916 0.0597044 0.105662 0.394338
+0.910916 0.0597046 -0.204124 -0.353553
+0.89533 0.178092 -0.15623 -0.377172
+0.864425 0.293433 -0.105662 -0.394338
+0.818729 0.403753 -0.0532871 -0.404756
+0.759024 0.507164 6.08539e-10 -0.408248
+0.686333 0.601898 0.0532871 -0.404756
+0.601898 0.686333 0.105662 -0.394338
+0.507164 0.759024 0.15623 -0.377172
+0.403752 0.818729 0.204124 -0.353553
+0.293433 0.864425 0.248526 -0.323885
+0.178092 0.89533 0.288675 -0.288675
+0.0597046 0.910916 0.323885 -0.248526
+-0.0597046 0.910916 0.353553 -0.204124
+-0.178092 0.89533 0.377172 -0.15623
+-0.293433 0.864425 0.394338 -0.105662
+-0.403753 0.818729 0.404756 -0.053287
+-0.507164 0.759024 0.408248 6.71206e-08
+-0.601898 0.686333 0.404756 0.0532872
+-0.686333 0.601898 0.394338 0.105662
+-0.759024 0.507164 0.377172 0.15623
+-0.818729 0.403752 0.353553 0.204124
+-0.864425 0.293433 0.323885 0.248526
+-0.89533 0.178092 0.288675 0.288675
+-0.910916 0.0597044 0.248526 0.323885
+0.949933 0.0622619 -0.059734 -0.300303
+0.93368 0.18572 -0.0200255 -0.305531
+0.901451 0.306001 0.0200255 -0.305531
+0.853797 0.421046 0.059734 -0.300303
+0.791535 0.528887 0.0984203 -0.289937
+0.71573 0.627678 0.135423 -0.27461
+0.627679 0.71573 0.170108 -0.254585
+0.528887 0.791536 0.201883 -0.230203
+0.421046 0.853797 0.230203 -0.201883
+0.306001 0.901451 0.254585 -0.170108
+0.18572 0.93368 0.27461 -0.135423
+0.0622619 0.949933 0.289937 -0.0984203
+-0.062262 0.949933 0.300303 -0.059734
+-0.18572 0.93368 0.305531 -0.0200255
+-0.306001 0.901451 0.305531 0.0200256
+-0.421046 0.853797 0.300303 0.059734
+-0.528887 0.791535 0.289937 0.0984204
+-0.627679 0.71573 0.27461 0.135423
+-0.71573 0.627678 0.254585 0.170108
+-0.791536 0.528887 0.230203 0.201883
+-0.853797 0.421046 0.201883 0.230203
+-0.901451 0.306001 0.170108 0.254585
+-0.93368 0.18572 0.135423 0.27461
+-0.949933 0.0622617 0.0984203 0.289937
+0.864171 0.0566408 -0.448436 -0.221144
+0.849385 0.168953 -0.415735 -0.277785
+0.820066 0.278375 -0.37592 -0.329673
+0.776715 0.383033 -0.329673 -0.37592
+0.720074 0.481138 -0.277785 -0.415735
+0.651112 0.57101 -0.221144 -0.448436
+0.57101 0.651112 -0.16072 -0.473465
+0.481138 0.720074 -0.0975451 -0.490393
+0.383033 0.776715 -0.0327016 -0.498929
+0.278375 0.820066 0.0327016 -0.498929
+0.168953 0.849385 0.0975451 -0.490393
+0.0566407 0.864171 0.16072 -0.473465
+-0.0566408 0.864171 0.221144 -0.448436
+-0.168953 0.849385 0.277785 -0.415735
+-0.278375 0.820066 0.329673 -0.37592
+-0.383033 0.776715 0.37592 -0.329673
+-0.481138 0.720074 0.415735 -0.277785
+-0.57101 0.651112 0.448436 -0.221144
+-0.651112 0.57101 0.473465 -0.16072
+-0.720074 0.481138 0.490393 -0.0975451
+-0.776715 0.383033 0.498929 -0.0327015
+-0.820066 0.278375 0.498929 0.0327017
+-0.849385 0.168953 0.490393 0.0975453
+-0.864171 0.0566406 0.473465 0.16072
+0.910916 0.0597046 -0.323885 -0.248526
+0.89533 0.178092 -0.288675 -0.288675
+0.864425 0.293433 -0.248526 -0.323885
+0.818729 0.403753 -0.204124 -0.353553
+0.759024 0.507164 -0.15623 -0.377172
+0.686333 0.601898 -0.105662 -0.394338
+0.601898 0.686333 -0.0532871 -0.404756
+0.507164 0.759024 1.72366e-08 -0.408248
+0.403752 0.818729 0.0532871 -0.404756
+0.293433 0.864425 0.105662 -0.394338
+0.178092 0.89533 0.15623 -0.377172
+0.0597046 0.910916 0.204124 -0.353553
+-0.0597046 0.910916 0.248526 -0.323885
+-0.178092 0.89533 0.288675 -0.288675
+-0.293433 0.864425 0.323885 -0.248526
+-0.403753 0.818729 0.353553 -0.204124
+-0.507164 0.759024 0.377172 -0.15623
+-0.601898 0.686333 0.394338 -0.105662
+-0.686333 0.601898 0.404756 -0.053287
+-0.759024 0.507164 0.408248 3.50817e-08
+-0.818729 0.403752 0.404756 0.0532871
+-0.864425 0.293433 0.394338 0.105663
+-0.89533 0.178092 0.377172 0.15623
+-0.910916 0.0597044 0.353553 0.204124
+0.910916 0.0597046 -0.394338 -0.105662
+0.89533 0.178092 -0.377172 -0.15623
+0.864425 0.293433 -0.353553 -0.204124
+0.818729 0.403753 -0.323885 -0.248526
+0.759024 0.507164 -0.288675 -0.288675
+0.686333 0.601898 -0.248526 -0.323885
+0.601898 0.686333 -0.204124 -0.353553
+0.507164 0.759024 -0.15623 -0.377172
+0.403752 0.818729 -0.105662 -0.394338
+0.293433 0.864425 -0.0532871 -0.404756
+0.178092 0.89533 -1.48024e-08 -0.408248
+0.0597046 0.910916 0.0532871 -0.404756
+-0.0597046 0.910916 0.105662 -0.394338
+-0.178092 0.89533 0.15623 -0.377172
+-0.293433 0.864425 0.204124 -0.353553
+-0.403753 0.818729 0.248526 -0.323885
+-0.507164 0.759024 0.288675 -0.288675
+-0.601898 0.686333 0.323885 -0.248526
+-0.686333 0.601898 0.353553 -0.204124
+-0.759024 0.507164 0.377172 -0.15623
+-0.818729 0.403752 0.394338 -0.105662
+-0.864425 0.293433 0.404756 -0.053287
+-0.89533 0.178092 0.408248 1.00377e-07
+-0.910916 0.0597044 0.404756 0.0532872
+0.949933 0.0622619 -0.289937 -0.0984203
+0.93368 0.18572 -0.27461 -0.135423
+0.901451 0.306001 -0.254585 -0.170108
+0.853797 0.421046 -0.230203 -0.201883
+0.791535 0.528887 -0.201883 -0.230203
+0.71573 0.627678 -0.170108 -0.254585
+0.627679 0.71573 -0.135423 -0.27461
+0.528887 0.791536 -0.0984203 -0.289937
+0.421046 0.853797 -0.059734 -0.300303
+0.306001 0.901451 -0.0200255 -0.305531
+0.18572 0.93368 0.0200255 -0.305531
+0.0622619 0.949933 0.059734 -0.300303
+-0.062262 0.949933 0.0984203 -0.289937
+-0.18572 0.93368 0.135423 -0.27461
+-0.306001 0.901451 0.170108 -0.254585
+-0.421046 0.853797 0.201883 -0.230203
+-0.528887 0.791535 0.230203 -0.201883
+-0.627679 0.71573 0.254585 -0.170108
+-0.71573 0.627678 0.27461 -0.135423
+-0.791536 0.528887 0.289937 -0.0984203
+-0.853797 0.421046 0.300303 -0.0597339
+-0.901451 0.306001 0.305531 -0.0200255
+-0.93368 0.18572 0.305531 0.0200256
+-0.949933 0.0622617 0.300303 0.059734
+0.949933 0.0622619 -0.201883 -0.230203
+0.93368 0.18572 -0.170108 -0.254585
+0.901451 0.306001 -0.135423 -0.27461
+0.853797 0.421046 -0.0984203 -0.289937
+0.791535 0.528887 -0.059734 -0.300303
+0.71573 0.627678 -0.0200255 -0.305531
+0.627679 0.71573 0.0200255 -0.305531
+0.528887 0.791536 0.059734 -0.300303
+0.421046 0.853797 0.0984203 -0.289937
+0.306001 0.901451 0.135423 -0.27461
+0.18572 0.93368 0.170108 -0.254585
+0.0622619 0.949933 0.201883 -0.230203
+-0.062262 0.949933 0.230203 -0.201883
+-0.18572 0.93368 0.254585 -0.170108
+-0.306001 0.901451 0.27461 -0.135423
+-0.421046 0.853797 0.289937 -0.0984203
+-0.528887 0.791535 0.300303 -0.0597339
+-0.627679 0.71573 0.305531 -0.0200255
+-0.71573 0.627678 0.305531 0.0200256
+-0.791536 0.528887 0.300303 0.059734
+-0.853797 0.421046 0.289937 0.0984204
+-0.901451 0.306001 0.27461 0.135423
+-0.93368 0.18572 0.254584 0.170108
+-0.949933 0.0622617 0.230203 0.201883
+0.976849 0.0640261 -0.0656136 -0.193291
+0.960135 0.190983 -0.0398226 -0.200202
+0.926993 0.314672 -0.0133504 -0.203687
+0.877989 0.432976 0.0133504 -0.203687
+0.813963 0.543873 0.0398226 -0.200202
+0.73601 0.645463 0.0656136 -0.193291
+0.645463 0.73601 0.0902818 -0.183073
+0.543873 0.813963 0.113405 -0.169723
+0.432976 0.877989 0.134588 -0.153469
+0.314672 0.926993 0.153469 -0.134588
+0.190983 0.960135 0.169723 -0.113405
+0.064026 0.976849 0.183073 -0.0902818
+-0.0640261 0.976849 0.193291 -0.0656136
+-0.190983 0.960135 0.200202 -0.0398226
+-0.314672 0.926992 0.203687 -0.0133503
+-0.432976 0.877989 0.203687 0.0133504
+-0.543873 0.813963 0.200202 0.0398227
+-0.645463 0.73601 0.193291 0.0656136
+-0.73601 0.645463 0.183073 0.0902818
+-0.813963 0.543873 0.169723 0.113405
+-0.877989 0.432976 0.153469 0.134588
+-0.926993 0.314671 0.134588 0.153469
+-0.960135 0.190982 0.113405 0.169723
+-0.976849 0.0640259 0.0902818 0.183073
+0.976849 0.0640261 -0.183073 -0.0902818
+0.960135 0.190983 -0.169723 -0.113405
+0.926993 0.314672 -0.153469 -0.134588
+0.877989 0.432976 -0.134588 -0.153469
+0.813963 0.543873 -0.113405 -0.169723
+0.73601 0.645463 -0.0902818 -0.183073
+0.645463 0.73601 -0.0656136 -0.193291
+0.543873 0.813963 -0.0398226 -0.200202
+0.432976 0.877989 -0.0133504 -0.203687
+0.314672 0.926993 0.0133504 -0.203687
+0.190983 0.960135 0.0398226 -0.200202
+0.064026 0.976849 0.0656136 -0.193291
+-0.0640261 0.976849 0.0902818 -0.183073
+-0.190983 0.960135 0.113405 -0.169723
+-0.314672 0.926992 0.134588 -0.153469
+-0.432976 0.877989 0.153469 -0.134588
+-0.543873 0.813963 0.169723 -0.113405
+-0.645463 0.73601 0.183073 -0.0902818
+-0.73601 0.645463 0.193291 -0.0656135
+-0.813963 0.543873 0.200202 -0.0398226
+-0.877989 0.432976 0.203687 -0.0133503
+-0.926993 0.314671 0.203687 0.0133504
+-0.960135 0.190982 0.200202 0.0398227
+-0.976849 0.0640259 0.193291 0.0656136
+0.992648 0.0650616 -0.0672942 -0.0767343
+0.975664 0.194072 -0.0567026 -0.0848615
+0.941985 0.319761 -0.0451409 -0.0915367
+0.892189 0.439979 -0.0328068 -0.0966457
+0.827128 0.552669 -0.0199113 -0.100101
+0.747914 0.655903 -0.00667518 -0.101844
+0.655903 0.747914 0.00667518 -0.101844
+0.552669 0.827128 0.0199113 -0.100101
+0.439979 0.892189 0.0328068 -0.0966457
+0.319761 0.941985 0.0451409 -0.0915367
+0.194072 0.975664 0.0567026 -0.0848615
+0.0650615 0.992648 0.0672942 -0.0767343
+-0.0650616 0.992648 0.0767343 -0.0672942
+-0.194072 0.975664 0.0848615 -0.0567026
+-0.319761 0.941985 0.0915367 -0.0451409
+-0.439979 0.892189 0.0966457 -0.0328068
+-0.552669 0.827128 0.100101 -0.0199113
+-0.655903 0.747914 0.101844 -0.00667516
+-0.747914 0.655903 0.101844 0.00667519
+-0.827128 0.552669 0.100101 0.0199113
+-0.892189 0.439979 0.0966456 0.0328068
+-0.941985 0.319761 0.0915367 0.0451409
+-0.975664 0.194071 0.0848615 0.0567027
+-0.992648 0.0650614 0.0767343 0.0672942
+0.762127 0.0499525 0.485311 -0.425606
+0.749087 0.149003 0.536711 -0.358619
+0.72323 0.245503 0.578929 -0.285496
+0.684998 0.337804 0.611241 -0.207488
+0.635045 0.424324 0.633094 -0.12593
+0.574227 0.503584 0.644115 -0.0422176
+0.503584 0.574227 0.644115 0.0422175
+0.424324 0.635045 0.633094 0.12593
+0.337804 0.684998 0.611241 0.207488
+0.245503 0.72323 0.578929 0.285496
+0.149003 0.749087 0.536711 0.358619
+0.0499524 0.762127 0.48531 0.425606
+-0.0499525 0.762127 0.425606 0.485311
+-0.149003 0.749087 0.358619 0.536711
+-0.245504 0.72323 0.285496 0.578929
+-0.337804 0.684998 0.207488 0.611241
+-0.424324 0.635045 0.12593 0.633094
+-0.503584 0.574227 0.0422174 0.644115
+-0.574227 0.503584 -0.0422176 0.644115
+-0.635045 0.424324 -0.12593 0.633094
+-0.684998 0.337804 -0.207488 0.611241
+-0.72323 0.245503 -0.285496 0.578929
+-0.749087 0.149003 -0.358619 0.536711
+-0.762127 0.0499523 -0.425606 0.48531
+0.814748 0.0534014 0.5 -0.288675
+0.800808 0.159291 0.533402 -0.220942
+0.773165 0.262454 0.557678 -0.149429
+0.732294 0.361127 0.572411 -0.0753593
+0.678892 0.453621 0.57735 8.60604e-10
+0.613875 0.538354 0.572411 0.0753593
+0.538354 0.613875 0.557678 0.149429
+0.453621 0.678892 0.533402 0.220942
+0.361127 0.732294 0.5 0.288675
+0.262454 0.773165 0.458043 0.351469
+0.159291 0.800808 0.408248 0.408248
+0.0534014 0.814748 0.351469 0.458043
+-0.0534015 0.814748 0.288675 0.5
+-0.159291 0.800808 0.220942 0.533402
+-0.262454 0.773165 0.149429 0.557678
+-0.361127 0.732293 0.0753592 0.572411
+-0.453621 0.678892 -9.49229e-08 0.57735
+-0.538354 0.613875 -0.0753594 0.572411
+-0.613875 0.538354 -0.149429 0.557678
+-0.678892 0.453621 -0.220942 0.533402
+-0.732294 0.361127 -0.288675 0.5
+-0.773165 0.262454 -0.351469 0.458043
+-0.800808 0.15929 -0.408248 0.408248
+-0.814748 0.0534013 -0.458043 0.351468
+0.814748 0.0534014 0.351469 -0.458043
+0.800808 0.159291 0.408248 -0.408248
+0.773165 0.262454 0.458043 -0.351469
+0.732294 0.361127 0.5 -0.288675
+0.678892 0.453621 0.533402 -0.220942
+0.613875 0.538354 0.557678 -0.149429
+0.538354 0.613875 0.572411 -0.0753593
+0.453621 0.678892 0.57735 2.43762e-08
+0.361127 0.732294 0.572411 0.0753593
+0.262454 0.773165 0.557678 0.149429
+0.159291 0.800808 0.533402 0.220942
+0.0534014 0.814748 0.5 0.288675
+-0.0534015 0.814748 0.458043 0.351469
+-0.159291 0.800808 0.408248 0.408248
+-0.262454 0.773165 0.351469 0.458043
+-0.361127 0.732293 0.288675 0.5
+-0.453621 0.678892 0.220942 0.533402
+-0.538354 0.613875 0.149429 0.557678
+-0.613875 0.538354 0.0753593 0.572411
+-0.678892 0.453621 -4.9613e-08 0.57735
+-0.732294 0.361127 -0.0753594 0.572411
+-0.773165 0.262454 -0.149429 0.557677
+-0.800808 0.15929 -0.220943 0.533402
+-0.814748 0.0534013 -0.288675 0.5
+0.864171 0.0566408 0.37592 -0.329673
+0.849385 0.168953 0.415735 -0.277785
+0.820066 0.278375 0.448436 -0.221144
+0.776715 0.383033 0.473465 -0.16072
+0.720074 0.481138 0.490393 -0.0975452
+0.651112 0.57101 0.498929 -0.0327016
+0.57101 0.651112 0.498929 0.0327016
+0.481138 0.720074 0.490393 0.0975452
+0.383033 0.776715 0.473465 0.16072
+0.278375 0.820066 0.448436 0.221144
+0.168953 0.849385 0.415735 0.277785
+0.0566407 0.864171 0.37592 0.329673
+-0.0566408 0.864171 0.329673 0.37592
+-0.168953 0.849385 0.277785 0.415735
+-0.278375 0.820066 0.221144 0.448436
+-0.383033 0.776715 0.16072 0.473465
+-0.481138 0.720074 0.0975451 0.490393
+-0.57101 0.651112 0.0327015 0.498929
+-0.651112 0.57101 -0.0327016 0.498929
+-0.720074 0.481138 -0.0975452 0.490393
+-0.776715 0.383033 -0.16072 0.473465
+-0.820066 0.278375 -0.221144 0.448436
+-0.849385 0.168953 -0.277785 0.415735
+-0.864171 0.0566406 -0.329673 0.37592
+0.864171 0.0566408 0.473465 -0.16072
+0.849385 0.168953 0.490393 -0.0975452
+0.820066 0.278375 0.498929 -0.0327016
+0.776715 0.383033 0.498929 0.0327016
+0.720074 0.481138 0.490393 0.0975452
+0.651112 0.57101 0.473465 0.16072
+0.57101 0.651112 0.448436 0.221144
+0.481138 0.720074 0.415735 0.277785
+0.383033 0.776715 0.37592 0.329673
+0.278375 0.820066 0.329673 0.37592
+0.168953 0.849385 0.277785 0.415735
+0.0566407 0.864171 0.221144 0.448436
+-0.0566408 0.864171 0.16072 0.473465
+-0.168953 0.849385 0.0975452 0.490393
+-0.278375 0.820066 0.0327015 0.498929
+-0.383033 0.776715 -0.0327017 0.498929
+-0.481138 0.720074 -0.0975452 0.490393
+-0.57101 0.651112 -0.16072 0.473465
+-0.651112 0.57101 -0.221144 0.448436
+-0.720074 0.481138 -0.277785 0.415735
+-0.776715 0.383033 -0.329673 0.37592
+-0.820066 0.278375 -0.37592 0.329673
+-0.849385 0.168953 -0.415735 0.277785
+-0.864171 0.0566406 -0.448436 0.221144
+0.910916 0.0597046 0.404756 -0.0532871
+0.89533 0.178092 0.408248 2.23064e-09
+0.864425 0.293433 0.404756 0.0532871
+0.818729 0.403753 0.394338 0.105662
+0.759024 0.507164 0.377172 0.15623
+0.686333 0.601898 0.353553 0.204124
+0.601898 0.686333 0.323885 0.248526
+0.507164 0.759024 0.288675 0.288675
+0.403752 0.818729 0.248526 0.323885
+0.293433 0.864425 0.204124 0.353553
+0.178092 0.89533 0.15623 0.377172
+0.0597046 0.910916 0.105662 0.394338
+-0.0597046 0.910916 0.0532871 0.404756
+-0.178092 0.89533 -1.82562e-09 0.408248
+-0.293433 0.864425 -0.0532871 0.404756
+-0.403753 0.818729 -0.105663 0.394338
+-0.507164 0.759024 -0.15623 0.377172
+-0.601898 0.686333 -0.204124 0.353553
+-0.686333 0.601898 -0.248526 0.323885
+-0.759024 0.507164 -0.288675 0.288675
+-0.818729 0.403752 -0.323885 0.248526
+-0.864425 0.293433 -0.353553 0.204124
+-0.89533 0.178092 -0.377172 0.15623
+-0.910916 0.0597044 -0.394338 0.105662
+0.910916 0.0597046 0.353553 -0.204124
+0.89533 0.178092 0.377172 -0.15623
+0.864425 0.293433 0.394338 -0.105662
+0.818729 0.403753 0.404756 -0.0532871
+0.759024 0.507164 0.408248 6.08539e-10
+0.686333 0.601898 0.404756 0.0532871
+0.601898 0.686333 0.394338 0.105662
+0.507164 0.759024 0.377172 0.15623
+0.403752 0.818729 0.353553 0.204124
+0.293433 0.864425 0.323885 0.248526
+0.178092 0.89533 0.288675 0.288675
+0.0597046 0.910916 0.248526 0.323885
+-0.0597046 0.910916 0.204124 0.353553
+-0.178092 0.89533 0.15623 0.377172
+-0.293433 0.864425 0.105662 0.394338
+-0.403753 0.818729 0.053287 0.404756
+-0.507164 0.759024 -6.71206e-08 0.408248
+-0.601898 0.686333 -0.0532872 0.404756
+-0.686333 0.601898 -0.105662 0.394338
+-0.759024 0.507164 -0.15623 0.377172
+-0.818729 0.403752 -0.204124 0.353553
+-0.864425 0.293433 -0.248526 0.323885
+-0.89533 0.178092 -0.288675 0.288675
+-0.910916 0.0597044 -0.323885 0.248526
+0.949933 0.0622619 0.300303 -0.059734
+0.93368 0.18572 0.305531 -0.0200255
+0.901451 0.306001 0.305531 0.0200255
+0.853797 0.421046 0.300303 0.059734
+0.791535 0.528887 0.289937 0.0984203
+0.71573 0.627678 0.27461 0.135423
+0.627679 0.71573 0.254585 0.170108
+0.528887 0.791536 0.230203 0.201883
+0.421046 0.853797 0.201883 0.230203
+0.306001 0.901451 0.170108 0.254585
+0.18572 0.93368 0.135423 0.27461
+0.0622619 0.949933 0.0984203 0.289937
+-0.062262 0.949933 0.059734 0.300303
+-0.18572 0.93368 0.0200255 0.305531
+-0.306001 0.901451 -0.0200256 0.305531
+-0.421046 0.853797 -0.059734 0.300303
+-0.528887 0.791535 -0.0984204 0.289937
+-0.627679 0.71573 -0.135423 0.27461
+-0.71573 0.627678 -0.170108 0.254585
+-0.791536 0.528887 -0.201883 0.230203
+-0.853797 0.421046 -0.230203 0.201883
+-0.901451 0.306001 -0.254585 0.170108
+-0.93368 0.18572 -0.27461 0.135423
+-0.949933 0.0622617 -0.289937 0.0984203
+0.864171 0.0566408 0.221144 -0.448436
+0.849385 0.168953 0.277785 -0.415735
+0.820066 0.278375 0.329673 -0.37592
+0.776715 0.383033 0.37592 -0.329673
+0.720074 0.481138 0.415735 -0.277785
+0.651112 0.57101 0.448436 -0.221144
+0.57101 0.651112 0.473465 -0.16072
+0.481138 0.720074 0.490393 -0.0975451
+0.383033 0.776715 0.498929 -0.0327016
+0.278375 0.820066 0.498929 0.0327016
+0.168953 0.849385 0.490393 0.0975451
+0.0566407 0.864171 0.473465 0.16072
+-0.0566408 0.864171 0.448436 0.221144
+-0.168953 0.849385 0.415735 0.277785
+-0.278375 0.820066 0.37592 0.329673
+-0.383033 0.776715 0.329673 0.37592
+-0.481138 0.720074 0.277785 0.415735
+-0.57101 0.651112 0.221144 0.448436
+-0.651112 0.57101 0.16072 0.473465
+-0.720074 0.481138 0.0975451 0.490393
+-0.776715 0.383033 0.0327015 0.498929
+-0.820066 0.278375 -0.0327017 0.498929
+-0.849385 0.168953 -0.0975453 0.490393
+-0.864171 0.0566406 -0.16072 0.473465
+0.910916 0.0597046 0.248526 -0.323885
+0.89533 0.178092 0.288675 -0.288675
+0.864425 0.293433 0.323885 -0.248526
+0.818729 0.403753 0.353553 -0.204124
+0.759024 0.507164 0.377172 -0.15623
+0.686333 0.601898 0.394338 -0.105662
+0.601898 0.686333 0.404756 -0.0532871
+0.507164 0.759024 0.408248 1.72366e-08
+0.403752 0.818729 0.404756 0.0532871
+0.293433 0.864425 0.394338 0.105662
+0.178092 0.89533 0.377172 0.15623
+0.0597046 0.910916 0.353553 0.204124
+-0.0597046 0.910916 0.323885 0.248526
+-0.178092 0.89533 0.288675 0.288675
+-0.293433 0.864425 0.248526 0.323885
+-0.403753 0.818729 0.204124 0.353553
+-0.507164 0.759024 0.15623 0.377172
+-0.601898 0.686333 0.105662 0.394338
+-0.686333 0.601898 0.053287 0.404756
+-0.759024 0.507164 -3.50817e-08 0.408248
+-0.818729 0.403752 -0.0532871 0.404756
+-0.864425 0.293433 -0.105663 0.394338
+-0.89533 0.178092 -0.15623 0.377172
+-0.910916 0.0597044 -0.204124 0.353553
+0.910916 0.0597046 0.105662 -0.394338
+0.89533 0.178092 0.15623 -0.377172
+0.864425 0.293433 0.204124 -0.353553
+0.818729 0.403753 0.248526 -0.323885
+0.759024 0.507164 0.288675 -0.288675
+0.686333 0.601898 0.323885 -0.248526
+0.601898 0.686333 0.353553 -0.204124
+0.507164 0.759024 0.377172 -0.15623
+0.403752 0.818729 0.394338 -0.105662
+0.293433 0.864425 0.404756 -0.0532871
+0.178092 0.89533 0.408248 -1.48024e-08
+0.0597046 0.910916 0.404756 0.0532871
+-0.0597046 0.910916 0.394338 0.105662
+-0.178092 0.89533 0.377172 0.15623
+-0.293433 0.864425 0.353553 0.204124
+-0.403753 0.818729 0.323885 0.248526
+-0.507164 0.759024 0.288675 0.288675
+-0.601898 0.686333 0.248526 0.323885
+-0.686333 0.601898 0.204124 0.353553
+-0.759024 0.507164 0.15623 0.377172
+-0.818729 0.403752 0.105662 0.394338
+-0.864425 0.293433 0.053287 0.404756
+-0.89533 0.178092 -1.00377e-07 0.408248
+-0.910916 0.0597044 -0.0532872 0.404756
+0.949933 0.0622619 0.0984203 -0.289937
+0.93368 0.18572 0.135423 -0.27461
+0.901451 0.306001 0.170108 -0.254585
+0.853797 0.421046 0.201883 -0.230203
+0.791535 0.528887 0.230203 -0.201883
+0.71573 0.627678 0.254585 -0.170108
+0.627679 0.71573 0.27461 -0.135423
+0.528887 0.791536 0.289937 -0.0984203
+0.421046 0.853797 0.300303 -0.059734
+0.306001 0.901451 0.305531 -0.0200255
+0.18572 0.93368 0.305531 0.0200255
+0.0622619 0.949933 0.300303 0.059734
+-0.062262 0.949933 0.289937 0.0984203
+-0.18572 0.93368 0.27461 0.135423
+-0.306001 0.901451 0.254585 0.170108
+-0.421046 0.853797 0.230203 0.201883
+-0.528887 0.791535 0.201883 0.230203
+-0.627679 0.71573 0.170108 0.254585
+-0.71573 0.627678 0.135423 0.27461
+-0.791536 0.528887 0.0984203 0.289937
+-0.853797 0.421046 0.0597339 0.300303
+-0.901451 0.306001 0.0200255 0.305531
+-0.93368 0.18572 -0.0200256 0.305531
+-0.949933 0.0622617 -0.059734 0.300303
+0.949933 0.0622619 0.230203 -0.201883
+0.93368 0.18572 0.254585 -0.170108
+0.901451 0.306001 0.27461 -0.135423
+0.853797 0.421046 0.289937 -0.0984203
+0.791535 0.528887 0.300303 -0.059734
+0.71573 0.627678 0.305531 -0.0200255
+0.627679 0.71573 0.305531 0.0200255
+0.528887 0.791536 0.300303 0.059734
+0.421046 0.853797 0.289937 0.0984203
+0.306001 0.901451 0.27461 0.135423
+0.18572 0.93368 0.254585 0.170108
+0.0622619 0.949933 0.230203 0.201883
+-0.062262 0.949933 0.201883 0.230203
+-0.18572 0.93368 0.170108 0.254585
+-0.306001 0.901451 0.135423 0.27461
+-0.421046 0.853797 0.0984203 0.289937
+-0.528887 0.791535 0.0597339 0.300303
+-0.627679 0.71573 0.0200255 0.305531
+-0.71573 0.627678 -0.0200256 0.305531
+-0.791536 0.528887 -0.059734 0.300303
+-0.853797 0.421046 -0.0984204 0.289937
+-0.901451 0.306001 -0.135423 0.27461
+-0.93368 0.18572 -0.170108 0.254584
+-0.949933 0.0622617 -0.201883 0.230203
+0.976849 0.0640261 0.193291 -0.0656136
+0.960135 0.190983 0.200202 -0.0398226
+0.926993 0.314672 0.203687 -0.0133504
+0.877989 0.432976 0.203687 0.0133504
+0.813963 0.543873 0.200202 0.0398226
+0.73601 0.645463 0.193291 0.0656136
+0.645463 0.73601 0.183073 0.0902818
+0.543873 0.813963 0.169723 0.113405
+0.432976 0.877989 0.153469 0.134588
+0.314672 0.926993 0.134588 0.153469
+0.190983 0.960135 0.113405 0.169723
+0.064026 0.976849 0.0902818 0.183073
+-0.0640261 0.976849 0.0656136 0.193291
+-0.190983 0.960135 0.0398226 0.200202
+-0.314672 0.926992 0.0133503 0.203687
+-0.432976 0.877989 -0.0133504 0.203687
+-0.543873 0.813963 -0.0398227 0.200202
+-0.645463 0.73601 -0.0656136 0.193291
+-0.73601 0.645463 -0.0902818 0.183073
+-0.813963 0.543873 -0.113405 0.169723
+-0.877989 0.432976 -0.134588 0.153469
+-0.926993 0.314671 -0.153469 0.134588
+-0.960135 0.190982 -0.169723 0.113405
+-0.976849 0.0640259 -0.183073 0.0902818
+0.976849 0.0640261 0.0902818 -0.183073
+0.960135 0.190983 0.113405 -0.169723
+0.926993 0.314672 0.134588 -0.153469
+0.877989 0.432976 0.153469 -0.134588
+0.813963 0.543873 0.169723 -0.113405
+0.73601 0.645463 0.183073 -0.0902818
+0.645463 0.73601 0.193291 -0.0656136
+0.543873 0.813963 0.200202 -0.0398226
+0.432976 0.877989 0.203687 -0.0133504
+0.314672 0.926993 0.203687 0.0133504
+0.190983 0.960135 0.200202 0.0398226
+0.064026 0.976849 0.193291 0.0656136
+-0.0640261 0.976849 0.183073 0.0902818
+-0.190983 0.960135 0.169723 0.113405
+-0.314672 0.926992 0.153469 0.134588
+-0.432976 0.877989 0.134588 0.153469
+-0.543873 0.813963 0.113405 0.169723
+-0.645463 0.73601 0.0902818 0.183073
+-0.73601 0.645463 0.0656135 0.193291
+-0.813963 0.543873 0.0398226 0.200202
+-0.877989 0.432976 0.0133503 0.203687
+-0.926993 0.314671 -0.0133504 0.203687
+-0.960135 0.190982 -0.0398227 0.200202
+-0.976849 0.0640259 -0.0656136 0.193291
+0.992648 0.0650616 0.0767343 -0.0672942
+0.975664 0.194072 0.0848615 -0.0567026
+0.941985 0.319761 0.0915367 -0.0451409
+0.892189 0.439979 0.0966457 -0.0328068
+0.827128 0.552669 0.100101 -0.0199113
+0.747914 0.655903 0.101844 -0.00667518
+0.655903 0.747914 0.101844 0.00667518
+0.552669 0.827128 0.100101 0.0199113
+0.439979 0.892189 0.0966457 0.0328068
+0.319761 0.941985 0.0915367 0.0451409
+0.194072 0.975664 0.0848615 0.0567026
+0.0650615 0.992648 0.0767343 0.0672942
+-0.0650616 0.992648 0.0672942 0.0767343
+-0.194072 0.975664 0.0567026 0.0848615
+-0.319761 0.941985 0.0451409 0.0915367
+-0.439979 0.892189 0.0328068 0.0966457
+-0.552669 0.827128 0.0199113 0.100101
+-0.655903 0.747914 0.00667516 0.101844
+-0.747914 0.655903 -0.00667519 0.101844
+-0.827128 0.552669 -0.0199113 0.100101
+-0.892189 0.439979 -0.0328068 0.0966456
+-0.941985 0.319761 -0.0451409 0.0915367
+-0.975664 0.194071 -0.0567027 0.0848615
+-0.992648 0.0650614 -0.0672942 0.0767343
+0.498929 0.0327016 0.864171 0.0566408
+0.490393 0.0975452 0.849385 0.168953
+0.473465 0.16072 0.820066 0.278375
+0.448436 0.221144 0.776715 0.383033
+0.415735 0.277785 0.720074 0.481138
+0.37592 0.329673 0.651112 0.57101
+0.329673 0.37592 0.57101 0.651112
+0.277785 0.415735 0.481138 0.720074
+0.221144 0.448436 0.383033 0.776715
+0.16072 0.473465 0.278375 0.820066
+0.0975452 0.490393 0.168953 0.849385
+0.0327015 0.498929 0.0566407 0.864171
+-0.0327016 0.498929 -0.0566408 0.864171
+-0.0975452 0.490393 -0.168953 0.849385
+-0.16072 0.473465 -0.278375 0.820066
+-0.221144 0.448436 -0.383033 0.776715
+-0.277785 0.415735 -0.481138 0.720074
+-0.329673 0.37592 -0.57101 0.651112
+-0.37592 0.329673 -0.651112 0.57101
+-0.415735 0.277785 -0.720074 0.481138
+-0.448436 0.221144 -0.776715 0.383033
+-0.473465 0.16072 -0.820066 0.278375
+-0.490393 0.097545 -0.849385 0.168953
+-0.498929 0.0327015 -0.864171 0.0566406
+0.576114 0.0377605 0.788675 0.211325
+0.566257 0.112635 0.754344 0.31246
+0.54671 0.185583 0.707107 0.408248
+0.51781 0.255356 0.64777 0.497052
+0.480049 0.320759 0.57735 0.57735
+0.434075 0.380673 0.497052 0.64777
+0.380673 0.434075 0.408248 0.707107
+0.320759 0.480049 0.31246 0.754344
+0.255355 0.51781 0.211325 0.788675
+0.185583 0.54671 0.106574 0.809511
+0.112635 0.566257 2.96048e-08 0.816497
+0.0377605 0.576114 -0.106574 0.809511
+-0.0377605 0.576114 -0.211325 0.788675
+-0.112635 0.566257 -0.31246 0.754344
+-0.185583 0.54671 -0.408248 0.707107
+-0.255356 0.51781 -0.497052 0.64777
+-0.320759 0.480049 -0.57735 0.57735
+-0.380674 0.434075 -0.64777 0.497052
+-0.434075 0.380673 -0.707107 0.408248
+-0.480049 0.320759 -0.754345 0.31246
+-0.51781 0.255355 -0.788675 0.211325
+-0.54671 0.185583 -0.809511 0.106574
+-0.566257 0.112635 -0.816497 -2.00753e-07
+-0.576114 0.0377604 -0.809511 -0.106574
+0.576114 0.0377605 0.809511 -0.106574
+0.566257 0.112635 0.816497 4.46127e-09
+0.54671 0.185583 0.809511 0.106574
+0.51781 0.255356 0.788675 0.211325
+0.480049 0.320759 0.754344 0.31246
+0.434075 0.380673 0.707107 0.408248
+0.380673 0.434075 0.64777 0.497052
+0.320759 0.480049 0.57735 0.57735
+0.255355 0.51781 0.497052 0.64777
+0.185583 0.54671 0.408248 0.707107
+0.112635 0.566257 0.31246 0.754344
+0.0377605 0.576114 0.211325 0.788675
+-0.0377605 0.576114 0.106574 0.809511
+-0.112635 0.566257 -3.65123e-09 0.816497
+-0.185583 0.54671 -0.106574 0.809511
+-0.255356 0.51781 -0.211325 0.788675
+-0.320759 0.480049 -0.31246 0.754344
+-0.380674 0.434075 -0.408248 0.707107
+-0.434075 0.380673 -0.497052 0.64777
+-0.480049 0.320759 -0.57735 0.57735
+-0.51781 0.255355 -0.64777 0.497052
+-0.54671 0.185583 -0.707107 0.408248
+-0.566257 0.112635 -0.754345 0.31246
+-0.576114 0.0377604 -0.788675 0.211325
+0.644115 0.0422175 0.762127 0.0499525
+0.633094 0.12593 0.749087 0.149003
+0.611241 0.207488 0.72323 0.245503
+0.578929 0.285496 0.684998 0.337804
+0.536711 0.358619 0.635045 0.424324
+0.485311 0.425606 0.574227 0.503584
+0.425606 0.485311 0.503584 0.574227
+0.358619 0.536711 0.424324 0.635045
+0.285496 0.578929 0.337804 0.684998
+0.207488 0.611241 0.245503 0.72323
+0.12593 0.633094 0.149003 0.749087
+0.0422175 0.644115 0.0499524 0.762127
+-0.0422176 0.644115 -0.0499525 0.762127
+-0.12593 0.633094 -0.149003 0.749087
+-0.207488 0.611241 -0.245504 0.72323
+-0.285496 0.578929 -0.337804 0.684998
+-0.358619 0.536711 -0.424324 0.635045
+-0.425606 0.48531 -0.503584 0.574227
+-0.485311 0.425606 -0.574227 0.503584
+-0.536711 0.358619 -0.635045 0.424324
+-0.578929 0.285496 -0.684998 0.337804
+-0.611241 0.207488 -0.72323 0.245503
+-0.633094 0.12593 -0.749087 0.149003
+-0.644115 0.0422174 -0.762127 0.0499523
+0.644115 0.0422175 0.684998 0.337804
+0.633094 0.12593 0.635045 0.424324
+0.611241 0.207488 0.574227 0.503584
+0.578929 0.285496 0.503584 0.574227
+0.536711 0.358619 0.424324 0.635045
+0.485311 0.425606 0.337804 0.684998
+0.425606 0.485311 0.245503 0.72323
+0.358619 0.536711 0.149003 0.749087
+0.285496 0.578929 0.0499525 0.762127
+0.207488 0.611241 -0.0499525 0.762127
+0.12593 0.633094 -0.149003 0.749087
+0.0422175 0.644115 -0.245503 0.72323
+-0.0422176 0.644115 -0.337804 0.684998
+-0.12593 0.633094 -0.424324 0.635045
+-0.207488 0.611241 -0.503584 0.574227
+-0.285496 0.578929 -0.574227 0.503584
+-0.358619 0.536711 -0.635045 0.424324
+-0.425606 0.48531 -0.684998 0.337803
+-0.485311 0.425606 -0.72323 0.245503
+-0.536711 0.358619 -0.749087 0.149003
+-0.578929 0.285496 -0.762127 0.0499524
+-0.611241 0.207488 -0.762127 -0.0499527
+-0.633094 0.12593 -0.749087 -0.149003
+-0.644115 0.0422174 -0.72323 -0.245504
+0.705593 0.046247 0.560986 0.430459
+0.69352 0.13795 0.5 0.5
+0.669581 0.227292 0.430459 0.560986
+0.634185 0.312745 0.353553 0.612372
+0.587938 0.392847 0.270598 0.653281
+0.531631 0.466228 0.183013 0.683013
+0.466228 0.531631 0.0922959 0.701057
+0.392847 0.587938 -2.98546e-08 0.707107
+0.312745 0.634185 -0.092296 0.701057
+0.227292 0.669581 -0.183013 0.683013
+0.13795 0.69352 -0.270598 0.653281
+0.046247 0.705593 -0.353553 0.612372
+-0.046247 0.705593 -0.430459 0.560986
+-0.13795 0.69352 -0.5 0.5
+-0.227292 0.669581 -0.560986 0.430459
+-0.312745 0.634185 -0.612373 0.353553
+-0.392848 0.587938 -0.653282 0.270598
+-0.466228 0.531631 -0.683013 0.183013
+-0.531631 0.466228 -0.701057 0.0922959
+-0.587938 0.392847 -0.707107 -6.07632e-08
+-0.634185 0.312745 -0.701057 -0.092296
+-0.669581 0.227292 -0.683013 -0.183013
+-0.69352 0.13795 -0.653281 -0.270598
+-0.705593 0.0462468 -0.612372 -0.353554
+0.705593 0.046247 0.683013 0.183013
+0.69352 0.13795 0.653281 0.270598
+0.669581 0.227292 0.612372 0.353553
+0.634185 0.312745 0.560986 0.430459
+0.587938 0.392847 0.5 0.5
+0.531631 0.466228 0.430459 0.560986
+0.466228 0.531631 0.353553 0.612372
+0.392847 0.587938 0.270598 0.653281
+0.312745 0.634185 0.183013 0.683013
+0.227292 0.669581 0.092296 0.701057
+0.13795 0.69352 2.56385e-08 0.707107
+0.046247 0.705593 -0.092296 0.701057
+-0.046247 0.705593 -0.183013 0.683013
+-0.13795 0.69352 -0.270598 0.653281
+-0.227292 0.669581 -0.353553 0.612372
+-0.312745 0.634185 -0.430459 0.560985
+-0.392848 0.587938 -0.5 0.5
+-0.466228 0.531631 -0.560986 0.430459
+-0.531631 0.466228 -0.612372 0.353553
+-0.587938 0.392847 -0.653282 0.270598
+-0.634185 0.312745 -0.683013 0.183013
+-0.669581 0.227292 -0.701057 0.0922958
+-0.69352 0.13795 -0.707107 -1.73857e-07
+-0.705593 0.0462468 -0.701057 -0.0922961
+0.762127 0.0499525 0.578929 0.285496
+0.749087 0.149003 0.536711 0.358619
+0.72323 0.245503 0.485311 0.425606
+0.684998 0.337804 0.425606 0.485311
+0.635045 0.424324 0.358619 0.536711
+0.574227 0.503584 0.285496 0.578929
+0.503584 0.574227 0.207488 0.611241
+0.424324 0.635045 0.12593 0.633094
+0.337804 0.684998 0.0422175 0.644115
+0.245503 0.72323 -0.0422175 0.644115
+0.149003 0.749087 -0.12593 0.633094
+0.0499524 0.762127 -0.207488 0.611241
+-0.0499525 0.762127 -0.285496 0.578929
+-0.149003 0.749087 -0.358619 0.536711
+-0.245504 0.72323 -0.425606 0.48531
+-0.337804 0.684998 -0.485311 0.425606
+-0.424324 0.635045 -0.536711 0.358619
+-0.503584 0.574227 -0.578929 0.285496
+-0.574227 0.503584 -0.611241 0.207488
+-0.635045 0.424324 -0.633094 0.12593
+-0.684998 0.337804 -0.644115 0.0422175
+-0.72323 0.245503 -0.644115 -0.0422177
+-0.749087 0.149003 -0.633094 -0.12593
+-0.762127 0.0499523 -0.611241 -0.207488
+0.644115 0.0422175 0.72323 -0.245503
+0.633094 0.12593 0.749087 -0.149003
+0.611241 0.207488 0.762127 -0.0499525
+0.578929 0.285496 0.762127 0.0499525
+0.536711 0.358619 0.749087 0.149003
+0.485311 0.425606 0.72323 0.245503
+0.425606 0.485311 0.684998 0.337804
+0.358619 0.536711 0.635045 0.424324
+0.285496 0.578929 0.574227 0.503584
+0.207488 0.611241 0.503584 0.574227
+0.12593 0.633094 0.424324 0.635045
+0.0422175 0.644115 0.337804 0.684998
+-0.0422176 0.644115 0.245503 0.72323
+-0.12593 0.633094 0.149003 0.749087
+-0.207488 0.611241 0.0499524 0.762127
+-0.285496 0.578929 -0.0499526 0.762127
+-0.358619 0.536711 -0.149003 0.749087
+-0.425606 0.48531 -0.245504 0.72323
+-0.485311 0.425606 -0.337804 0.684998
+-0.536711 0.358619 -0.424324 0.635045
+-0.578929 0.285496 -0.503584 0.574227
+-0.611241 0.207488 -0.574227 0.503584
+-0.633094 0.12593 -0.635046 0.424324
+-0.644115 0.0422174 -0.684998 0.337803
+0.705593 0.046247 0.701057 -0.092296
+0.69352 0.13795 0.707107 3.86358e-09
+0.669581 0.227292 0.701057 0.092296
+0.634185 0.312745 0.683013 0.183013
+0.587938 0.392847 0.653281 0.270598
+0.531631 0.466228 0.612372 0.353553
+0.466228 0.531631 0.560986 0.430459
+0.392847 0.587938 0.5 0.5
+0.312745 0.634185 0.430459 0.560986
+0.227292 0.669581 0.353553 0.612372
+0.13795 0.69352 0.270598 0.653281
+0.046247 0.705593 0.183013 0.683013
+-0.046247 0.705593 0.0922959 0.701057
+-0.13795 0.69352 -3.16206e-09 0.707107
+-0.227292 0.669581 -0.092296 0.701057
+-0.312745 0.634185 -0.183013 0.683013
+-0.392848 0.587938 -0.270598 0.653281
+-0.466228 0.531631 -0.353553 0.612372
+-0.531631 0.466228 -0.430459 0.560985
+-0.587938 0.392847 -0.5 0.5
+-0.634185 0.312745 -0.560986 0.430459
+-0.669581 0.227292 -0.612373 0.353553
+-0.69352 0.13795 -0.653282 0.270598
+-0.705593 0.0462468 -0.683013 0.183013
+0.705593 0.046247 0.612372 -0.353553
+0.69352 0.13795 0.653281 -0.270598
+0.669581 0.227292 0.683013 -0.183013
+0.634185 0.312745 0.701057 -0.0922959
+0.587938 0.392847 0.707107 1.05402e-09
+0.531631 0.466228 0.701057 0.0922959
+0.466228 0.531631 0.683013 0.183013
+0.392847 0.587938 0.653281 0.270598
+0.312745 0.634185 0.612372 0.353553
+0.227292 0.669581 0.560986 0.430459
+0.13795 0.69352 0.5 0.5
+0.046247 0.705593 0.430459 0.560986
+-0.046247 0.705593 0.353553 0.612372
+-0.13795 0.69352 0.270598 0.653281
+-0.227292 0.669581 0.183013 0.683013
+-0.312745 0.634185 0.0922958 0.701057
+-0.392848 0.587938 -1.16256e-07 0.707107
+-0.466228 0.531631 -0.0922961 0.701057
+-0.531631 0.466228 -0.183013 0.683013
+-0.587938 0.392847 -0.270598 0.653281
+-0.634185 0.312745 -0.353553 0.612372
+-0.669581 0.227292 -0.430459 0.560985
+-0.69352 0.13795 -0.5 0.5
+-0.705593 0.0462468 -0.560986 0.430459
+0.762127 0.0499525 0.611241 -0.207488
+0.749087 0.149003 0.633094 -0.12593
+0.72323 0.245503 0.644115 -0.0422175
+0.684998 0.337804 0.644115 0.0422176
+0.635045 0.424324 0.633094 0.12593
+0.574227 0.503584 0.611241 0.207488
+0.503584 0.574227 0.578929 0.285496
+0.424324 0.635045 0.536711 0.358619
+0.337804 0.684998 0.485311 0.425606
+0.245503 0.72323 0.425606 0.485311
+0.149003 0.749087 0.358619 0.536711
+0.0499524 0.762127 0.285496 0.578929
+-0.0499525 0.762127 0.207488 0.611241
+-0.149003 0.749087 0.12593 0.633094
+-0.245504 0.72323 0.0422175 0.644115
+-0.337804 0.684998 -0.0422177 0.644115
+-0.424324 0.635045 -0.12593 0.633094
+-0.503584 0.574227 -0.207488 0.611241
+-0.574227 0.503584 -0.285496 0.578929
+-0.635045 0.424324 -0.358619 0.536711
+-0.684998 0.337804 -0.425606 0.48531
+-0.72323 0.245503 -0.485311 0.425606
+-0.749087 0.149003 -0.536711 0.358619
+-0.762127 0.0499523 -0.578929 0.285496
+0.762127 0.0499525 0.644115 0.0422175
+0.749087 0.149003 0.633094 0.12593
+0.72323 0.245503 0.611241 0.207488
+0.684998 0.337804 0.578929 0.285496
+0.635045 0.424324 0.536711 0.358619
+0.574227 0.503584 0.485311 0.425606
+0.503584 0.574227 0.425606 0.485311
+0.424324 0.635045 0.358619 0.536711
+0.337804 0.684998 0.285496 0.578929
+0.245503 0.72323 0.207488 0.611241
+0.149003 0.749087 0.12593 0.633094
+0.0499524 0.762127 0.0422175 0.644115
+-0.0499525 0.762127 -0.0422176 0.644115
+-0.149003 0.749087 -0.12593 0.633094
+-0.245504 0.72323 -0.207488 0.611241
+-0.337804 0.684998 -0.285496 0.578929
+-0.424324 0.635045 -0.358619 0.536711
+-0.503584 0.574227 -0.425606 0.48531
+-0.574227 0.503584 -0.485311 0.425606
+-0.635045 0.424324 -0.536711 0.358619
+-0.684998 0.337804 -0.578929 0.285496
+-0.72323 0.245503 -0.611241 0.207488
+-0.749087 0.149003 -0.633094 0.12593
+-0.762127 0.0499523 -0.644115 0.0422174
+0.814748 0.0534014 0.557678 0.149429
+0.800808 0.159291 0.533402 0.220942
+0.773165 0.262454 0.5 0.288675
+0.732294 0.361127 0.458043 0.351469
+0.678892 0.453621 0.408248 0.408248
+0.613875 0.538354 0.351469 0.458043
+0.538354 0.613875 0.288675 0.5
+0.453621 0.678892 0.220942 0.533402
+0.361127 0.732294 0.149429 0.557678
+0.262454 0.773165 0.0753593 0.572411
+0.159291 0.800808 2.09338e-08 0.57735
+0.0534014 0.814748 -0.0753594 0.572411
+-0.0534015 0.814748 -0.149429 0.557678
+-0.159291 0.800808 -0.220942 0.533402
+-0.262454 0.773165 -0.288675 0.5
+-0.361127 0.732293 -0.351469 0.458043
+-0.453621 0.678892 -0.408248 0.408248
+-0.538354 0.613875 -0.458043 0.351469
+-0.613875 0.538354 -0.5 0.288675
+-0.678892 0.453621 -0.533402 0.220942
+-0.732294 0.361127 -0.557678 0.149429
+-0.773165 0.262454 -0.572411 0.0753592
+-0.800808 0.15929 -0.57735 -1.41954e-07
+-0.814748 0.0534013 -0.572411 -0.0753595
+0.814748 0.0534014 0.572411 -0.0753593
+0.800808 0.159291 0.57735 3.1546e-09
+0.773165 0.262454 0.572411 0.0753593
+0.732294 0.361127 0.557678 0.149429
+0.678892 0.453621 0.533402 0.220942
+0.613875 0.538354 0.5 0.288675
+0.538354 0.613875 0.458043 0.351469
+0.453621 0.678892 0.408248 0.408248
+0.361127 0.732294 0.351469 0.458043
+0.262454 0.773165 0.288675 0.5
+0.159291 0.800808 0.220942 0.533402
+0.0534014 0.814748 0.149429 0.557678
+-0.0534015 0.814748 0.0753593 0.572411
+-0.159291 0.800808 -2.58181e-09 0.57735
+-0.262454 0.773165 -0.0753594 0.572411
+-0.361127 0.732293 -0.149429 0.557678
+-0.453621 0.678892 -0.220942 0.533402
+-0.538354 0.613875 -0.288675 0.5
+-0.613875 0.538354 -0.351469 0.458043
+-0.678892 0.453621 -0.408248 0.408248
+-0.732294 0.361127 -0.458043 0.351469
+-0.773165 0.262454 -0.5 0.288675
+-0.800808 0.15929 -0.533402 0.220942
+-0.814748 0.0534013 -0.557678 0.149429
+0.864171 0.0566408 0.498929 0.0327016
+0.849385 0.168953 0.490393 0.0975452
+0.820066 0.278375 0.473465 0.16072
+0.776715 0.383033 0.448436 0.221144
+0.720074 0.481138 0.415735 0.277785
+0.651112 0.57101 0.37592 0.329673
+0.57101 0.651112 0.329673 0.37592
+0.481138 0.720074 0.277785 0.415735
+0.383033 0.776715 0.221144 0.448436
+0.278375 0.820066 0.16072 0.473465
+0.168953 0.849385 0.0975452 0.490393
+0.0566407 0.864171 0.0327015 0.498929
+-0.0566408 0.864171 -0.0327016 0.498929
+-0.168953 0.849385 -0.0975452 0.490393
+-0.278375 0.820066 -0.16072 0.473465
+-0.383033 0.776715 -0.221144 0.448436
+-0.481138 0.720074 -0.277785 0.415735
+-0.57101 0.651112 -0.329673 0.37592
+-0.651112 0.57101 -0.37592 0.329673
+-0.720074 0.481138 -0.415735 0.277785
+-0.776715 0.383033 -0.448436 0.221144
+-0.820066 0.278375 -0.473465 0.16072
+-0.849385 0.168953 -0.490393 0.097545
+-0.864171 0.0566406 -0.498929 0.0327015
+0.498929 0.0327016 -0.0566408 0.864171
+0.490393 0.0975452 -0.168953 0.849385
+0.473465 0.16072 -0.278375 0.820066
+0.448436 0.221144 -0.383033 0.776715
+0.415735 0.277785 -0.481138 0.720074
+0.37592 0.329673 -0.57101 0.651112
+0.329673 0.37592 -0.651112 0.57101
+0.277785 0.415735 -0.720074 0.481138
+0.221144 0.448436 -0.776715 0.383033
+0.16072 0.473465 -0.820066 0.278375
+0.0975452 0.490393 -0.849385 0.168953
+0.0327015 0.498929 -0.864171 0.0566407
+-0.0327016 0.498929 -0.864171 -0.0566408
+-0.0975452 0.490393 -0.849385 -0.168953
+-0.16072 0.473465 -0.820066 -0.278375
+-0.221144 0.448436 -0.776715 -0.383033
+-0.277785 0.415735 -0.720074 -0.481138
+-0.329673 0.37592 -0.651112 -0.57101
+-0.37592 0.329673 -0.57101 -0.651112
+-0.415735 0.277785 -0.481138 -0.720074
+-0.448436 0.221144 -0.383033 -0.776715
+-0.473465 0.16072 -0.278375 -0.820066
+-0.490393 0.097545 -0.168953 -0.849385
+-0.498929 0.0327015 -0.0566406 -0.864171
+0.576114 0.0377605 -0.211325 0.788675
+0.566257 0.112635 -0.31246 0.754344
+0.54671 0.185583 -0.408248 0.707107
+0.51781 0.255356 -0.497052 0.64777
+0.480049 0.320759 -0.57735 0.57735
+0.434075 0.380673 -0.64777 0.497052
+0.380673 0.434075 -0.707107 0.408248
+0.320759 0.480049 -0.754344 0.31246
+0.255355 0.51781 -0.788675 0.211325
+0.185583 0.54671 -0.809511 0.106574
+0.112635 0.566257 -0.816497 2.96048e-08
+0.0377605 0.576114 -0.809511 -0.106574
+-0.0377605 0.576114 -0.788675 -0.211325
+-0.112635 0.566257 -0.754344 -0.31246
+-0.185583 0.54671 -0.707107 -0.408248
+-0.255356 0.51781 -0.64777 -0.497052
+-0.320759 0.480049 -0.57735 -0.57735
+-0.380674 0.434075 -0.497052 -0.64777
+-0.434075 0.380673 -0.408248 -0.707107
+-0.480049 0.320759 -0.31246 -0.754345
+-0.51781 0.255355 -0.211325 -0.788675
+-0.54671 0.185583 -0.106574 -0.809511
+-0.566257 0.112635 2.00753e-07 -0.816497
+-0.576114 0.0377604 0.106574 -0.809511
+0.576114 0.0377605 0.106574 0.809511
+0.566257 0.112635 -4.46128e-09 0.816497
+0.54671 0.185583 -0.106574 0.809511
+0.51781 0.255356 -0.211325 0.788675
+0.480049 0.320759 -0.31246 0.754344
+0.434075 0.380673 -0.408248 0.707107
+0.380673 0.434075 -0.497052 0.64777
+0.320759 0.480049 -0.57735 0.57735
+0.255355 0.51781 -0.64777 0.497052
+0.185583 0.54671 -0.707107 0.408248
+0.112635 0.566257 -0.754344 0.31246
+0.0377605 0.576114 -0.788675 0.211325
+-0.0377605 0.576114 -0.809511 0.106574
+-0.112635 0.566257 -0.816497 -3.65123e-09
+-0.185583 0.54671 -0.809511 -0.106574
+-0.255356 0.51781 -0.788675 -0.211325
+-0.320759 0.480049 -0.754344 -0.31246
+-0.380674 0.434075 -0.707107 -0.408248
+-0.434075 0.380673 -0.64777 -0.497052
+-0.480049 0.320759 -0.57735 -0.57735
+-0.51781 0.255355 -0.497052 -0.64777
+-0.54671 0.185583 -0.408248 -0.707107
+-0.566257 0.112635 -0.31246 -0.754345
+-0.576114 0.0377604 -0.211325 -0.788675
+0.644115 0.0422175 -0.0499525 0.762127
+0.633094 0.12593 -0.149003 0.749087
+0.611241 0.207488 -0.245503 0.72323
+0.578929 0.285496 -0.337804 0.684998
+0.536711 0.358619 -0.424324 0.635045
+0.485311 0.425606 -0.503584 0.574227
+0.425606 0.485311 -0.574227 0.503584
+0.358619 0.536711 -0.635045 0.424324
+0.285496 0.578929 -0.684998 0.337804
+0.207488 0.611241 -0.72323 0.245503
+0.12593 0.633094 -0.749087 0.149003
+0.0422175 0.644115 -0.762127 0.0499524
+-0.0422176 0.644115 -0.762127 -0.0499525
+-0.12593 0.633094 -0.749087 -0.149003
+-0.207488 0.611241 -0.72323 -0.245504
+-0.285496 0.578929 -0.684998 -0.337804
+-0.358619 0.536711 -0.635045 -0.424324
+-0.425606 0.48531 -0.574227 -0.503584
+-0.485311 0.425606 -0.503584 -0.574227
+-0.536711 0.358619 -0.424324 -0.635045
+-0.578929 0.285496 -0.337804 -0.684998
+-0.611241 0.207488 -0.245503 -0.72323
+-0.633094 0.12593 -0.149003 -0.749087
+-0.644115 0.0422174 -0.0499523 -0.762127
+0.644115 0.0422175 -0.337804 0.684998
+0.633094 0.12593 -0.424324 0.635045
+0.611241 0.207488 -0.503584 0.574227
+0.578929 0.285496 -0.574227 0.503584
+0.536711 0.358619 -0.635045 0.424324
+0.485311 0.425606 -0.684998 0.337804
+0.425606 0.485311 -0.72323 0.245503
+0.358619 0.536711 -0.749087 0.149003
+0.285496 0.578929 -0.762127 0.0499525
+0.207488 0.611241 -0.762127 -0.0499525
+0.12593 0.633094 -0.749087 -0.149003
+0.0422175 0.644115 -0.72323 -0.245503
+-0.0422176 0.644115 -0.684998 -0.337804
+-0.12593 0.633094 -0.635045 -0.424324
+-0.207488 0.611241 -0.574227 -0.503584
+-0.285496 0.578929 -0.503584 -0.574227
+-0.358619 0.536711 -0.424324 -0.635045
+-0.425606 0.48531 -0.337803 -0.684998
+-0.485311 0.425606 -0.245503 -0.72323
+-0.536711 0.358619 -0.149003 -0.749087
+-0.578929 0.285496 -0.0499524 -0.762127
+-0.611241 0.207488 0.0499527 -0.762127
+-0.633094 0.12593 0.149003 -0.749087
+-0.644115 0.0422174 0.245504 -0.72323
+0.705593 0.046247 -0.430459 0.560986
+0.69352 0.13795 -0.5 0.5
+0.669581 0.227292 -0.560986 0.430459
+0.634185 0.312745 -0.612372 0.353553
+0.587938 0.392847 -0.653281 0.270598
+0.531631 0.466228 -0.683013 0.183013
+0.466228 0.531631 -0.701057 0.0922959
+0.392847 0.587938 -0.707107 -2.98546e-08
+0.312745 0.634185 -0.701057 -0.092296
+0.227292 0.669581 -0.683013 -0.183013
+0.13795 0.69352 -0.653281 -0.270598
+0.046247 0.705593 -0.612372 -0.353553
+-0.046247 0.705593 -0.560986 -0.430459
+-0.13795 0.69352 -0.5 -0.5
+-0.227292 0.669581 -0.430459 -0.560986
+-0.312745 0.634185 -0.353553 -0.612373
+-0.392848 0.587938 -0.270598 -0.653282
+-0.466228 0.531631 -0.183013 -0.683013
+-0.531631 0.466228 -0.0922959 -0.701057
+-0.587938 0.392847 6.07632e-08 -0.707107
+-0.634185 0.312745 0.092296 -0.701057
+-0.669581 0.227292 0.183013 -0.683013
+-0.69352 0.13795 0.270598 -0.653281
+-0.705593 0.0462468 0.353554 -0.612372
+0.705593 0.046247 -0.183013 0.683013
+0.69352 0.13795 -0.270598 0.653281
+0.669581 0.227292 -0.353553 0.612372
+0.634185 0.312745 -0.430459 0.560986
+0.587938 0.392847 -0.5 0.5
+0.531631 0.466228 -0.560986 0.430459
+0.466228 0.531631 -0.612372 0.353553
+0.392847 0.587938 -0.653281 0.270598
+0.312745 0.634185 -0.683013 0.183013
+0.227292 0.669581 -0.701057 0.092296
+0.13795 0.69352 -0.707107 2.56385e-08
+0.046247 0.705593 -0.701057 -0.092296
+-0.046247 0.705593 -0.683013 -0.183013
+-0.13795 0.69352 -0.653281 -0.270598
+-0.227292 0.669581 -0.612372 -0.353553
+-0.312745 0.634185 -0.560985 -0.430459
+-0.392848 0.587938 -0.5 -0.5
+-0.466228 0.531631 -0.430459 -0.560986
+-0.531631 0.466228 -0.353553 -0.612372
+-0.587938 0.392847 -0.270598 -0.653282
+-0.634185 0.312745 -0.183013 -0.683013
+-0.669581 0.227292 -0.0922958 -0.701057
+-0.69352 0.13795 1.73857e-07 -0.707107
+-0.705593 0.0462468 0.0922961 -0.701057
+0.762127 0.0499525 -0.285496 0.578929
+0.749087 0.149003 -0.358619 0.536711
+0.72323 0.245503 -0.425606 0.485311
+0.684998 0.337804 -0.485311 0.425606
+0.635045 0.424324 -0.536711 0.358619
+0.574227 0.503584 -0.578929 0.285496
+0.503584 0.574227 -0.611241 0.207488
+0.424324 0.635045 -0.633094 0.12593
+0.337804 0.684998 -0.644115 0.0422175
+0.245503 0.72323 -0.644115 -0.0422175
+0.149003 0.749087 -0.633094 -0.12593
+0.0499524 0.762127 -0.611241 -0.207488
+-0.0499525 0.762127 -0.578929 -0.285496
+-0.149003 0.749087 -0.536711 -0.358619
+-0.245504 0.72323 -0.48531 -0.425606
+-0.337804 0.684998 -0.425606 -0.485311
+-0.424324 0.635045 -0.358619 -0.536711
+-0.503584 0.574227 -0.285496 -0.578929
+-0.574227 0.503584 -0.207488 -0.611241
+-0.635045 0.424324 -0.12593 -0.633094
+-0.684998 0.337804 -0.0422175 -0.644115
+-0.72323 0.245503 0.0422177 -0.644115
+-0.749087 0.149003 0.12593 -0.633094
+-0.762127 0.0499523 0.207488 -0.611241
+0.644115 0.0422175 0.245503 0.72323
+0.633094 0.12593 0.149003 0.749087
+0.611241 0.207488 0.0499525 0.762127
+0.578929 0.285496 -0.0499525 0.762127
+0.536711 0.358619 -0.149003 0.749087
+0.485311 0.425606 -0.245503 0.72323
+0.425606 0.485311 -0.337804 0.684998
+0.358619 0.536711 -0.424324 0.635045
+0.285496 0.578929 -0.503584 0.574227
+0.207488 0.611241 -0.574227 0.503584
+0.12593 0.633094 -0.635045 0.424324
+0.0422175 0.644115 -0.684998 0.337804
+-0.0422176 0.644115 -0.72323 0.245503
+-0.12593 0.633094 -0.749087 0.149003
+-0.207488 0.611241 -0.762127 0.0499524
+-0.285496 0.578929 -0.762127 -0.0499526
+-0.358619 0.536711 -0.749087 -0.149003
+-0.425606 0.48531 -0.72323 -0.245504
+-0.485311 0.425606 -0.684998 -0.337804
+-0.536711 0.358619 -0.635045 -0.424324
+-0.578929 0.285496 -0.574227 -0.503584
+-0.611241 0.207488 -0.503584 -0.574227
+-0.633094 0.12593 -0.424324 -0.635046
+-0.644115 0.0422174 -0.337803 -0.684998
+0.705593 0.046247 0.092296 0.701057
+0.69352 0.13795 -3.86358e-09 0.707107
+0.669581 0.227292 -0.092296 0.701057
+0.634185 0.312745 -0.183013 0.683013
+0.587938 0.392847 -0.270598 0.653281
+0.531631 0.466228 -0.353553 0.612372
+0.466228 0.531631 -0.430459 0.560986
+0.392847 0.587938 -0.5 0.5
+0.312745 0.634185 -0.560986 0.430459
+0.227292 0.669581 -0.612372 0.353553
+0.13795 0.69352 -0.653281 0.270598
+0.046247 0.705593 -0.683013 0.183013
+-0.046247 0.705593 -0.701057 0.0922959
+-0.13795 0.69352 -0.707107 -3.16206e-09
+-0.227292 0.669581 -0.701057 -0.092296
+-0.312745 0.634185 -0.683013 -0.183013
+-0.392848 0.587938 -0.653281 -0.270598
+-0.466228 0.531631 -0.612372 -0.353553
+-0.531631 0.466228 -0.560985 -0.430459
+-0.587938 0.392847 -0.5 -0.5
+-0.634185 0.312745 -0.430459 -0.560986
+-0.669581 0.227292 -0.353553 -0.612373
+-0.69352 0.13795 -0.270598 -0.653282
+-0.705593 0.0462468 -0.183013 -0.683013
+0.705593 0.046247 0.353553 0.612372
+0.69352 0.13795 0.270598 0.653281
+0.669581 0.227292 0.183013 0.683013
+0.634185 0.312745 0.0922959 0.701057
+0.587938 0.392847 -1.05402e-09 0.707107
+0.531631 0.466228 -0.0922959 0.701057
+0.466228 0.531631 -0.183013 0.683013
+0.392847 0.587938 -0.270598 0.653281
+0.312745 0.634185 -0.353553 0.612372
+0.227292 0.669581 -0.430459 0.560986
+0.13795 0.69352 -0.5 0.5
+0.046247 0.705593 -0.560986 0.430459
+-0.046247 0.705593 -0.612372 0.353553
+-0.13795 0.69352 -0.653281 0.270598
+-0.227292 0.669581 -0.683013 0.183013
+-0.312745 0.634185 -0.701057 0.0922958
+-0.392848 0.587938 -0.707107 -1.16256e-07
+-0.466228 0.531631 -0.701057 -0.0922961
+-0.531631 0.466228 -0.683013 -0.183013
+-0.587938 0.392847 -0.653281 -0.270598
+-0.634185 0.312745 -0.612372 -0.353553
+-0.669581 0.227292 -0.560985 -0.430459
+-0.69352 0.13795 -0.5 -0.5
+-0.705593 0.0462468 -0.430459 -0.560986
+0.762127 0.0499525 0.207488 0.611241
+0.749087 0.149003 0.12593 0.633094
+0.72323 0.245503 0.0422175 0.644115
+0.684998 0.337804 -0.0422176 0.644115
+0.635045 0.424324 -0.12593 0.633094
+0.574227 0.503584 -0.207488 0.611241
+0.503584 0.574227 -0.285496 0.578929
+0.424324 0.635045 -0.358619 0.536711
+0.337804 0.684998 -0.425606 0.485311
+0.245503 0.72323 -0.485311 0.425606
+0.149003 0.749087 -0.536711 0.358619
+0.0499524 0.762127 -0.578929 0.285496
+-0.0499525 0.762127 -0.611241 0.207488
+-0.149003 0.749087 -0.633094 0.12593
+-0.245504 0.72323 -0.644115 0.0422175
+-0.337804 0.684998 -0.644115 -0.0422177
+-0.424324 0.635045 -0.633094 -0.12593
+-0.503584 0.574227 -0.611241 -0.207488
+-0.574227 0.503584 -0.578929 -0.285496
+-0.635045 0.424324 -0.536711 -0.358619
+-0.684998 0.337804 -0.48531 -0.425606
+-0.72323 0.245503 -0.425606 -0.485311
+-0.749087 0.149003 -0.358619 -0.536711
+-0.762127 0.0499523 -0.285496 -0.578929
+0.762127 0.0499525 -0.0422175 0.644115
+0.749087 0.149003 -0.12593 0.633094
+0.72323 0.245503 -0.207488 0.611241
+0.684998 0.337804 -0.285496 0.578929
+0.635045 0.424324 -0.358619 0.536711
+0.574227 0.503584 -0.425606 0.485311
+0.503584 0.574227 -0.485311 0.425606
+0.424324 0.635045 -0.536711 0.358619
+0.337804 0.684998 -0.578929 0.285496
+0.245503 0.72323 -0.611241 0.207488
+0.149003 0.749087 -0.633094 0.12593
+0.0499524 0.762127 -0.644115 0.0422175
+-0.0499525 0.762127 -0.644115 -0.0422176
+-0.149003 0.749087 -0.633094 -0.12593
+-0.245504 0.72323 -0.611241 -0.207488
+-0.337804 0.684998 -0.578929 -0.285496
+-0.424324 0.635045 -0.536711 -0.358619
+-0.503584 0.574227 -0.48531 -0.425606
+-0.574227 0.503584 -0.425606 -0.485311
+-0.635045 0.424324 -0.358619 -0.536711
+-0.684998 0.337804 -0.285496 -0.578929
+-0.72323 0.245503 -0.207488 -0.611241
+-0.749087 0.149003 -0.12593 -0.633094
+-0.762127 0.0499523 -0.0422174 -0.644115
+0.814748 0.0534014 -0.149429 0.557678
+0.800808 0.159291 -0.220942 0.533402
+0.773165 0.262454 -0.288675 0.5
+0.732294 0.361127 -0.351469 0.458043
+0.678892 0.453621 -0.408248 0.408248
+0.613875 0.538354 -0.458043 0.351469
+0.538354 0.613875 -0.5 0.288675
+0.453621 0.678892 -0.533402 0.220942
+0.361127 0.732294 -0.557678 0.149429
+0.262454 0.773165 -0.572411 0.0753593
+0.159291 0.800808 -0.57735 2.09338e-08
+0.0534014 0.814748 -0.572411 -0.0753594
+-0.0534015 0.814748 -0.557678 -0.149429
+-0.159291 0.800808 -0.533402 -0.220942
+-0.262454 0.773165 -0.5 -0.288675
+-0.361127 0.732293 -0.458043 -0.351469
+-0.453621 0.678892 -0.408248 -0.408248
+-0.538354 0.613875 -0.351469 -0.458043
+-0.613875 0.538354 -0.288675 -0.5
+-0.678892 0.453621 -0.220942 -0.533402
+-0.732294 0.361127 -0.149429 -0.557678
+-0.773165 0.262454 -0.0753592 -0.572411
+-0.800808 0.15929 1.41954e-07 -0.57735
+-0.814748 0.0534013 0.0753595 -0.572411
+0.814748 0.0534014 0.0753593 0.572411
+0.800808 0.159291 -3.1546e-09 0.57735
+0.773165 0.262454 -0.0753593 0.572411
+0.732294 0.361127 -0.149429 0.557678
+0.678892 0.453621 -0.220942 0.533402
+0.613875 0.538354 -0.288675 0.5
+0.538354 0.613875 -0.351469 0.458043
+0.453621 0.678892 -0.408248 0.408248
+0.361127 0.732294 -0.458043 0.351469
+0.262454 0.773165 -0.5 0.288675
+0.159291 0.800808 -0.533402 0.220942
+0.0534014 0.814748 -0.557678 0.149429
+-0.0534015 0.814748 -0.572411 0.0753593
+-0.159291 0.800808 -0.57735 -2.58181e-09
+-0.262454 0.773165 -0.572411 -0.0753594
+-0.361127 0.732293 -0.557678 -0.149429
+-0.453621 0.678892 -0.533402 -0.220942
+-0.538354 0.613875 -0.5 -0.288675
+-0.613875 0.538354 -0.458043 -0.351469
+-0.678892 0.453621 -0.408248 -0.408248
+-0.732294 0.361127 -0.351469 -0.458043
+-0.773165 0.262454 -0.288675 -0.5
+-0.800808 0.15929 -0.220942 -0.533402
+-0.814748 0.0534013 -0.149429 -0.557678
+0.864171 0.0566408 -0.0327016 0.498929
+0.849385 0.168953 -0.0975452 0.490393
+0.820066 0.278375 -0.16072 0.473465
+0.776715 0.383033 -0.221144 0.448436
+0.720074 0.481138 -0.277785 0.415735
+0.651112 0.57101 -0.329673 0.37592
+0.57101 0.651112 -0.37592 0.329673
+0.481138 0.720074 -0.415735 0.277785
+0.383033 0.776715 -0.448436 0.221144
+0.278375 0.820066 -0.473465 0.16072
+0.168953 0.849385 -0.490393 0.0975452
+0.0566407 0.864171 -0.498929 0.0327015
+-0.0566408 0.864171 -0.498929 -0.0327016
+-0.168953 0.849385 -0.490393 -0.0975452
+-0.278375 0.820066 -0.473465 -0.16072
+-0.383033 0.776715 -0.448436 -0.221144
+-0.481138 0.720074 -0.415735 -0.277785
+-0.57101 0.651112 -0.37592 -0.329673
+-0.651112 0.57101 -0.329673 -0.37592
+-0.720074 0.481138 -0.277785 -0.415735
+-0.776715 0.383033 -0.221144 -0.448436
+-0.820066 0.278375 -0.16072 -0.473465
+-0.849385 0.168953 -0.097545 -0.490393
+-0.864171 0.0566406 -0.0327015 -0.498929
+0.498929 0.0327016 -0.864171 -0.0566408
+0.490393 0.0975452 -0.849385 -0.168953
+0.473465 0.16072 -0.820066 -0.278375
+0.448436 0.221144 -0.776715 -0.383033
+0.415735 0.277785 -0.720074 -0.481138
+0.37592 0.329673 -0.651112 -0.57101
+0.329673 0.37592 -0.57101 -0.651112
+0.277785 0.415735 -0.481138 -0.720074
+0.221144 0.448436 -0.383033 -0.776715
+0.16072 0.473465 -0.278375 -0.820066
+0.0975452 0.490393 -0.168953 -0.849385
+0.0327015 0.498929 -0.0566407 -0.864171
+-0.0327016 0.498929 0.0566408 -0.864171
+-0.0975452 0.490393 0.168953 -0.849385
+-0.16072 0.473465 0.278375 -0.820066
+-0.221144 0.448436 0.383033 -0.776715
+-0.277785 0.415735 0.481138 -0.720074
+-0.329673 0.37592 0.57101 -0.651112
+-0.37592 0.329673 0.651112 -0.57101
+-0.415735 0.277785 0.720074 -0.481138
+-0.448436 0.221144 0.776715 -0.383033
+-0.473465 0.16072 0.820066 -0.278375
+-0.490393 0.097545 0.849385 -0.168953
+-0.498929 0.0327015 0.864171 -0.0566406
+0.576114 0.0377605 -0.788675 -0.211325
+0.566257 0.112635 -0.754344 -0.31246
+0.54671 0.185583 -0.707107 -0.408248
+0.51781 0.255356 -0.64777 -0.497052
+0.480049 0.320759 -0.57735 -0.57735
+0.434075 0.380673 -0.497052 -0.64777
+0.380673 0.434075 -0.408248 -0.707107
+0.320759 0.480049 -0.31246 -0.754344
+0.255355 0.51781 -0.211325 -0.788675
+0.185583 0.54671 -0.106574 -0.809511
+0.112635 0.566257 -2.96048e-08 -0.816497
+0.0377605 0.576114 0.106574 -0.809511
+-0.0377605 0.576114 0.211325 -0.788675
+-0.112635 0.566257 0.31246 -0.754344
+-0.185583 0.54671 0.408248 -0.707107
+-0.255356 0.51781 0.497052 -0.64777
+-0.320759 0.480049 0.57735 -0.57735
+-0.380674 0.434075 0.64777 -0.497052
+-0.434075 0.380673 0.707107 -0.408248
+-0.480049 0.320759 0.754345 -0.31246
+-0.51781 0.255355 0.788675 -0.211325
+-0.54671 0.185583 0.809511 -0.106574
+-0.566257 0.112635 0.816497 2.00753e-07
+-0.576114 0.0377604 0.809511 0.106574
+0.576114 0.0377605 -0.809511 0.106574
+0.566257 0.112635 -0.816497 -4.46127e-09
+0.54671 0.185583 -0.809511 -0.106574
+0.51781 0.255356 -0.788675 -0.211325
+0.480049 0.320759 -0.754344 -0.31246
+0.434075 0.380673 -0.707107 -0.408248
+0.380673 0.434075 -0.64777 -0.497052
+0.320759 0.480049 -0.57735 -0.57735
+0.255355 0.51781 -0.497052 -0.64777
+0.185583 0.54671 -0.408248 -0.707107
+0.112635 0.566257 -0.31246 -0.754344
+0.0377605 0.576114 -0.211325 -0.788675
+-0.0377605 0.576114 -0.106574 -0.809511
+-0.112635 0.566257 3.65123e-09 -0.816497
+-0.185583 0.54671 0.106574 -0.809511
+-0.255356 0.51781 0.211325 -0.788675
+-0.320759 0.480049 0.31246 -0.754344
+-0.380674 0.434075 0.408248 -0.707107
+-0.434075 0.380673 0.497052 -0.64777
+-0.480049 0.320759 0.57735 -0.57735
+-0.51781 0.255355 0.64777 -0.497052
+-0.54671 0.185583 0.707107 -0.408248
+-0.566257 0.112635 0.754345 -0.31246
+-0.576114 0.0377604 0.788675 -0.211325
+0.644115 0.0422175 -0.762127 -0.0499525
+0.633094 0.12593 -0.749087 -0.149003
+0.611241 0.207488 -0.72323 -0.245503
+0.578929 0.285496 -0.684998 -0.337804
+0.536711 0.358619 -0.635045 -0.424324
+0.485311 0.425606 -0.574227 -0.503584
+0.425606 0.485311 -0.503584 -0.574227
+0.358619 0.536711 -0.424324 -0.635045
+0.285496 0.578929 -0.337804 -0.684998
+0.207488 0.611241 -0.245503 -0.72323
+0.12593 0.633094 -0.149003 -0.749087
+0.0422175 0.644115 -0.0499524 -0.762127
+-0.0422176 0.644115 0.0499525 -0.762127
+-0.12593 0.633094 0.149003 -0.749087
+-0.207488 0.611241 0.245504 -0.72323
+-0.285496 0.578929 0.337804 -0.684998
+-0.358619 0.536711 0.424324 -0.635045
+-0.425606 0.48531 0.503584 -0.574227
+-0.485311 0.425606 0.574227 -0.503584
+-0.536711 0.358619 0.635045 -0.424324
+-0.578929 0.285496 0.684998 -0.337804
+-0.611241 0.207488 0.72323 -0.245503
+-0.633094 0.12593 0.749087 -0.149003
+-0.644115 0.0422174 0.762127 -0.0499523
+0.644115 0.0422175 -0.684998 -0.337804
+0.633094 0.12593 -0.635045 -0.424324
+0.611241 0.207488 -0.574227 -0.503584
+0.578929 0.285496 -0.503584 -0.574227
+0.536711 0.358619 -0.424324 -0.635045
+0.485311 0.425606 -0.337804 -0.684998
+0.425606 0.485311 -0.245503 -0.72323
+0.358619 0.536711 -0.149003 -0.749087
+0.285496 0.578929 -0.0499525 -0.762127
+0.207488 0.611241 0.0499525 -0.762127
+0.12593 0.633094 0.149003 -0.749087
+0.0422175 0.644115 0.245503 -0.72323
+-0.0422176 0.644115 0.337804 -0.684998
+-0.12593 0.633094 0.424324 -0.635045
+-0.207488 0.611241 0.503584 -0.574227
+-0.285496 0.578929 0.574227 -0.503584
+-0.358619 0.536711 0.635045 -0.424324
+-0.425606 0.48531 0.684998 -0.337803
+-0.485311 0.425606 0.72323 -0.245503
+-0.536711 0.358619 0.749087 -0.149003
+-0.578929 0.285496 0.762127 -0.0499524
+-0.611241 0.207488 0.762127 0.0499527
+-0.633094 0.12593 0.749087 0.149003
+-0.644115 0.0422174 0.72323 0.245504
+0.705593 0.046247 -0.560986 -0.430459
+0.69352 0.13795 -0.5 -0.5
+0.669581 0.227292 -0.430459 -0.560986
+0.634185 0.312745 -0.353553 -0.612372
+0.587938 0.392847 -0.270598 -0.653281
+0.531631 0.466228 -0.183013 -0.683013
+0.466228 0.531631 -0.0922959 -0.701057
+0.392847 0.587938 2.98546e-08 -0.707107
+0.312745 0.634185 0.092296 -0.701057
+0.227292 0.669581 0.183013 -0.683013
+0.13795 0.69352 0.270598 -0.653281
+0.046247 0.705593 0.353553 -0.612372
+-0.046247 0.705593 0.430459 -0.560986
+-0.13795 0.69352 0.5 -0.5
+-0.227292 0.669581 0.560986 -0.430459
+-0.312745 0.634185 0.612373 -0.353553
+-0.392848 0.587938 0.653282 -0.270598
+-0.466228 0.531631 0.683013 -0.183013
+-0.531631 0.466228 0.701057 -0.0922959
+-0.587938 0.392847 0.707107 6.07632e-08
+-0.634185 0.312745 0.701057 0.092296
+-0.669581 0.227292 0.683013 0.183013
+-0.69352 0.13795 0.653281 0.270598
+-0.705593 0.0462468 0.612372 0.353554
+0.705593 0.046247 -0.683013 -0.183013
+0.69352 0.13795 -0.653281 -0.270598
+0.669581 0.227292 -0.612372 -0.353553
+0.634185 0.312745 -0.560986 -0.430459
+0.587938 0.392847 -0.5 -0.5
+0.531631 0.466228 -0.430459 -0.560986
+0.466228 0.531631 -0.353553 -0.612372
+0.392847 0.587938 -0.270598 -0.653281
+0.312745 0.634185 -0.183013 -0.683013
+0.227292 0.669581 -0.092296 -0.701057
+0.13795 0.69352 -2.56385e-08 -0.707107
+0.046247 0.705593 0.092296 -0.701057
+-0.046247 0.705593 0.183013 -0.683013
+-0.13795 0.69352 0.270598 -0.653281
+-0.227292 0.669581 0.353553 -0.612372
+-0.312745 0.634185 0.430459 -0.560985
+-0.392848 0.587938 0.5 -0.5
+-0.466228 0.531631 0.560986 -0.430459
+-0.531631 0.466228 0.612372 -0.353553
+-0.587938 0.392847 0.653282 -0.270598
+-0.634185 0.312745 0.683013 -0.183013
+-0.669581 0.227292 0.701057 -0.0922958
+-0.69352 0.13795 0.707107 1.73857e-07
+-0.705593 0.0462468 0.701057 0.0922961
+0.762127 0.0499525 -0.578929 -0.285496
+0.749087 0.149003 -0.536711 -0.358619
+0.72323 0.245503 -0.485311 -0.425606
+0.684998 0.337804 -0.425606 -0.485311
+0.635045 0.424324 -0.358619 -0.536711
+0.574227 0.503584 -0.285496 -0.578929
+0.503584 0.574227 -0.207488 -0.611241
+0.424324 0.635045 -0.12593 -0.633094
+0.337804 0.684998 -0.0422175 -0.644115
+0.245503 0.72323 0.0422175 -0.644115
+0.149003 0.749087 0.12593 -0.633094
+0.0499524 0.762127 0.207488 -0.611241
+-0.0499525 0.762127 0.285496 -0.578929
+-0.149003 0.749087 0.358619 -0.536711
+-0.245504 0.72323 0.425606 -0.48531
+-0.337804 0.684998 0.485311 -0.425606
+-0.424324 0.635045 0.536711 -0.358619
+-0.503584 0.574227 0.578929 -0.285496
+-0.574227 0.503584 0.611241 -0.207488
+-0.635045 0.424324 0.633094 -0.12593
+-0.684998 0.337804 0.644115 -0.0422175
+-0.72323 0.245503 0.644115 0.0422177
+-0.749087 0.149003 0.633094 0.12593
+-0.762127 0.0499523 0.611241 0.207488
+0.644115 0.0422175 -0.72323 0.245503
+0.633094 0.12593 -0.749087 0.149003
+0.611241 0.207488 -0.762127 0.0499525
+0.578929 0.285496 -0.762127 -0.0499525
+0.536711 0.358619 -0.749087 -0.149003
+0.485311 0.425606 -0.72323 -0.245503
+0.425606 0.485311 -0.684998 -0.337804
+0.358619 0.536711 -0.635045 -0.424324
+0.285496 0.578929 -0.574227 -0.503584
+0.207488 0.611241 -0.503584 -0.574227
+0.12593 0.633094 -0.424324 -0.635045
+0.0422175 0.644115 -0.337804 -0.684998
+-0.0422176 0.644115 -0.245503 -0.72323
+-0.12593 0.633094 -0.149003 -0.749087
+-0.207488 0.611241 -0.0499524 -0.762127
+-0.285496 0.578929 0.0499526 -0.762127
+-0.358619 0.536711 0.149003 -0.749087
+-0.425606 0.48531 0.245504 -0.72323
+-0.485311 0.425606 0.337804 -0.684998
+-0.536711 0.358619 0.424324 -0.635045
+-0.578929 0.285496 0.503584 -0.574227
+-0.611241 0.207488 0.574227 -0.503584
+-0.633094 0.12593 0.635046 -0.424324
+-0.644115 0.0422174 0.684998 -0.337803
+0.705593 0.046247 -0.701057 0.092296
+0.69352 0.13795 -0.707107 -3.86358e-09
+0.669581 0.227292 -0.701057 -0.092296
+0.634185 0.312745 -0.683013 -0.183013
+0.587938 0.392847 -0.653281 -0.270598
+0.531631 0.466228 -0.612372 -0.353553
+0.466228 0.531631 -0.560986 -0.430459
+0.392847 0.587938 -0.5 -0.5
+0.312745 0.634185 -0.430459 -0.560986
+0.227292 0.669581 -0.353553 -0.612372
+0.13795 0.69352 -0.270598 -0.653281
+0.046247 0.705593 -0.183013 -0.683013
+-0.046247 0.705593 -0.0922959 -0.701057
+-0.13795 0.69352 3.16206e-09 -0.707107
+-0.227292 0.669581 0.092296 -0.701057
+-0.312745 0.634185 0.183013 -0.683013
+-0.392848 0.587938 0.270598 -0.653281
+-0.466228 0.531631 0.353553 -0.612372
+-0.531631 0.466228 0.430459 -0.560985
+-0.587938 0.392847 0.5 -0.5
+-0.634185 0.312745 0.560986 -0.430459
+-0.669581 0.227292 0.612373 -0.353553
+-0.69352 0.13795 0.653282 -0.270598
+-0.705593 0.0462468 0.683013 -0.183013
+0.705593 0.046247 -0.612372 0.353553
+0.69352 0.13795 -0.653281 0.270598
+0.669581 0.227292 -0.683013 0.183013
+0.634185 0.312745 -0.701057 0.0922959
+0.587938 0.392847 -0.707107 -1.05402e-09
+0.531631 0.466228 -0.701057 -0.0922959
+0.466228 0.531631 -0.683013 -0.183013
+0.392847 0.587938 -0.653281 -0.270598
+0.312745 0.634185 -0.612372 -0.353553
+0.227292 0.669581 -0.560986 -0.430459
+0.13795 0.69352 -0.5 -0.5
+0.046247 0.705593 -0.430459 -0.560986
+-0.046247 0.705593 -0.353553 -0.612372
+-0.13795 0.69352 -0.270598 -0.653281
+-0.227292 0.669581 -0.183013 -0.683013
+-0.312745 0.634185 -0.0922958 -0.701057
+-0.392848 0.587938 1.16256e-07 -0.707107
+-0.466228 0.531631 0.0922961 -0.701057
+-0.531631 0.466228 0.183013 -0.683013
+-0.587938 0.392847 0.270598 -0.653281
+-0.634185 0.312745 0.353553 -0.612372
+-0.669581 0.227292 0.430459 -0.560985
+-0.69352 0.13795 0.5 -0.5
+-0.705593 0.0462468 0.560986 -0.430459
+0.762127 0.0499525 -0.611241 0.207488
+0.749087 0.149003 -0.633094 0.12593
+0.72323 0.245503 -0.644115 0.0422175
+0.684998 0.337804 -0.644115 -0.0422176
+0.635045 0.424324 -0.633094 -0.12593
+0.574227 0.503584 -0.611241 -0.207488
+0.503584 0.574227 -0.578929 -0.285496
+0.424324 0.635045 -0.536711 -0.358619
+0.337804 0.684998 -0.485311 -0.425606
+0.245503 0.72323 -0.425606 -0.485311
+0.149003 0.749087 -0.358619 -0.536711
+0.0499524 0.762127 -0.285496 -0.578929
+-0.0499525 0.762127 -0.207488 -0.611241
+-0.149003 0.749087 -0.12593 -0.633094
+-0.245504 0.72323 -0.0422175 -0.644115
+-0.337804 0.684998 0.0422177 -0.644115
+-0.424324 0.635045 0.12593 -0.633094
+-0.503584 0.574227 0.207488 -0.611241
+-0.574227 0.503584 0.285496 -0.578929
+-0.635045 0.424324 0.358619 -0.536711
+-0.684998 0.337804 0.425606 -0.48531
+-0.72323 0.245503 0.485311 -0.425606
+-0.749087 0.149003 0.536711 -0.358619
+-0.762127 0.0499523 0.578929 -0.285496
+0.762127 0.0499525 -0.644115 -0.0422175
+0.749087 0.149003 -0.633094 -0.12593
+0.72323 0.245503 -0.611241 -0.207488
+0.684998 0.337804 -0.578929 -0.285496
+0.635045 0.424324 -0.536711 -0.358619
+0.574227 0.503584 -0.485311 -0.425606
+0.503584 0.574227 -0.425606 -0.485311
+0.424324 0.635045 -0.358619 -0.536711
+0.337804 0.684998 -0.285496 -0.578929
+0.245503 0.72323 -0.207488 -0.611241
+0.149003 0.749087 -0.12593 -0.633094
+0.0499524 0.762127 -0.0422175 -0.644115
+-0.0499525 0.762127 0.0422176 -0.644115
+-0.149003 0.749087 0.12593 -0.633094
+-0.245504 0.72323 0.207488 -0.611241
+-0.337804 0.684998 0.285496 -0.578929
+-0.424324 0.635045 0.358619 -0.536711
+-0.503584 0.574227 0.425606 -0.48531
+-0.574227 0.503584 0.485311 -0.425606
+-0.635045 0.424324 0.536711 -0.358619
+-0.684998 0.337804 0.578929 -0.285496
+-0.72323 0.245503 0.611241 -0.207488
+-0.749087 0.149003 0.633094 -0.12593
+-0.762127 0.0499523 0.644115 -0.0422174
+0.814748 0.0534014 -0.557678 -0.149429
+0.800808 0.159291 -0.533402 -0.220942
+0.773165 0.262454 -0.5 -0.288675
+0.732294 0.361127 -0.458043 -0.351469
+0.678892 0.453621 -0.408248 -0.408248
+0.613875 0.538354 -0.351469 -0.458043
+0.538354 0.613875 -0.288675 -0.5
+0.453621 0.678892 -0.220942 -0.533402
+0.361127 0.732294 -0.149429 -0.557678
+0.262454 0.773165 -0.0753593 -0.572411
+0.159291 0.800808 -2.09338e-08 -0.57735
+0.0534014 0.814748 0.0753594 -0.572411
+-0.0534015 0.814748 0.149429 -0.557678
+-0.159291 0.800808 0.220942 -0.533402
+-0.262454 0.773165 0.288675 -0.5
+-0.361127 0.732293 0.351469 -0.458043
+-0.453621 0.678892 0.408248 -0.408248
+-0.538354 0.613875 0.458043 -0.351469
+-0.613875 0.538354 0.5 -0.288675
+-0.678892 0.453621 0.533402 -0.220942
+-0.732294 0.361127 0.557678 -0.149429
+-0.773165 0.262454 0.572411 -0.0753592
+-0.800808 0.15929 0.57735 1.41954e-07
+-0.814748 0.0534013 0.572411 0.0753595
+0.814748 0.0534014 -0.572411 0.0753593
+0.800808 0.159291 -0.57735 -3.1546e-09
+0.773165 0.262454 -0.572411 -0.0753593
+0.732294 0.361127 -0.557678 -0.149429
+0.678892 0.453621 -0.533402 -0.220942
+0.613875 0.538354 -0.5 -0.288675
+0.538354 0.613875 -0.458043 -0.351469
+0.453621 0.678892 -0.408248 -0.408248
+0.361127 0.732294 -0.351469 -0.458043
+0.262454 0.773165 -0.288675 -0.5
+0.159291 0.800808 -0.220942 -0.533402
+0.0534014 0.814748 -0.149429 -0.557678
+-0.0534015 0.814748 -0.0753593 -0.572411
+-0.159291 0.800808 2.58181e-09 -0.57735
+-0.262454 0.773165 0.0753594 -0.572411
+-0.361127 0.732293 0.149429 -0.557678
+-0.453621 0.678892 0.220942 -0.533402
+-0.538354 0.613875 0.288675 -0.5
+-0.613875 0.538354 0.351469 -0.458043
+-0.678892 0.453621 0.408248 -0.408248
+-0.732294 0.361127 0.458043 -0.351469
+-0.773165 0.262454 0.5 -0.288675
+-0.800808 0.15929 0.533402 -0.220942
+-0.814748 0.0534013 0.557678 -0.149429
+0.864171 0.0566408 -0.498929 -0.0327016
+0.849385 0.168953 -0.490393 -0.0975452
+0.820066 0.278375 -0.473465 -0.16072
+0.776715 0.383033 -0.448436 -0.221144
+0.720074 0.481138 -0.415735 -0.277785
+0.651112 0.57101 -0.37592 -0.329673
+0.57101 0.651112 -0.329673 -0.37592
+0.481138 0.720074 -0.277785 -0.415735
+0.383033 0.776715 -0.221144 -0.448436
+0.278375 0.820066 -0.16072 -0.473465
+0.168953 0.849385 -0.0975452 -0.490393
+0.0566407 0.864171 -0.0327015 -0.498929
+-0.0566408 0.864171 0.0327016 -0.498929
+-0.168953 0.849385 0.0975452 -0.490393
+-0.278375 0.820066 0.16072 -0.473465
+-0.383033 0.776715 0.221144 -0.448436
+-0.481138 0.720074 0.277785 -0.415735
+-0.57101 0.651112 0.329673 -0.37592
+-0.651112 0.57101 0.37592 -0.329673
+-0.720074 0.481138 0.415735 -0.277785
+-0.776715 0.383033 0.448436 -0.221144
+-0.820066 0.278375 0.473465 -0.16072
+-0.849385 0.168953 0.490393 -0.097545
+-0.864171 0.0566406 0.498929 -0.0327015
+0.498929 0.0327016 0.0566408 -0.864171
+0.490393 0.0975452 0.168953 -0.849385
+0.473465 0.16072 0.278375 -0.820066
+0.448436 0.221144 0.383033 -0.776715
+0.415735 0.277785 0.481138 -0.720074
+0.37592 0.329673 0.57101 -0.651112
+0.329673 0.37592 0.651112 -0.57101
+0.277785 0.415735 0.720074 -0.481138
+0.221144 0.448436 0.776715 -0.383033
+0.16072 0.473465 0.820066 -0.278375
+0.0975452 0.490393 0.849385 -0.168953
+0.0327015 0.498929 0.864171 -0.0566407
+-0.0327016 0.498929 0.864171 0.0566408
+-0.0975452 0.490393 0.849385 0.168953
+-0.16072 0.473465 0.820066 0.278375
+-0.221144 0.448436 0.776715 0.383033
+-0.277785 0.415735 0.720074 0.481138
+-0.329673 0.37592 0.651112 0.57101
+-0.37592 0.329673 0.57101 0.651112
+-0.415735 0.277785 0.481138 0.720074
+-0.448436 0.221144 0.383033 0.776715
+-0.473465 0.16072 0.278375 0.820066
+-0.490393 0.097545 0.168953 0.849385
+-0.498929 0.0327015 0.0566406 0.864171
+0.576114 0.0377605 0.211325 -0.788675
+0.566257 0.112635 0.31246 -0.754344
+0.54671 0.185583 0.408248 -0.707107
+0.51781 0.255356 0.497052 -0.64777
+0.480049 0.320759 0.57735 -0.57735
+0.434075 0.380673 0.64777 -0.497052
+0.380673 0.434075 0.707107 -0.408248
+0.320759 0.480049 0.754344 -0.31246
+0.255355 0.51781 0.788675 -0.211325
+0.185583 0.54671 0.809511 -0.106574
+0.112635 0.566257 0.816497 -2.96048e-08
+0.0377605 0.576114 0.809511 0.106574
+-0.0377605 0.576114 0.788675 0.211325
+-0.112635 0.566257 0.754344 0.31246
+-0.185583 0.54671 0.707107 0.408248
+-0.255356 0.51781 0.64777 0.497052
+-0.320759 0.480049 0.57735 0.57735
+-0.380674 0.434075 0.497052 0.64777
+-0.434075 0.380673 0.408248 0.707107
+-0.480049 0.320759 0.31246 0.754345
+-0.51781 0.255355 0.211325 0.788675
+-0.54671 0.185583 0.106574 0.809511
+-0.566257 0.112635 -2.00753e-07 0.816497
+-0.576114 0.0377604 -0.106574 0.809511
+0.576114 0.0377605 -0.106574 -0.809511
+0.566257 0.112635 4.46127e-09 -0.816497
+0.54671 0.185583 0.106574 -0.809511
+0.51781 0.255356 0.211325 -0.788675
+0.480049 0.320759 0.31246 -0.754344
+0.434075 0.380673 0.408248 -0.707107
+0.380673 0.434075 0.497052 -0.64777
+0.320759 0.480049 0.57735 -0.57735
+0.255355 0.51781 0.64777 -0.497052
+0.185583 0.54671 0.707107 -0.408248
+0.112635 0.566257 0.754344 -0.31246
+0.0377605 0.576114 0.788675 -0.211325
+-0.0377605 0.576114 0.809511 -0.106574
+-0.112635 0.566257 0.816497 3.65123e-09
+-0.185583 0.54671 0.809511 0.106574
+-0.255356 0.51781 0.788675 0.211325
+-0.320759 0.480049 0.754344 0.31246
+-0.380674 0.434075 0.707107 0.408248
+-0.434075 0.380673 0.64777 0.497052
+-0.480049 0.320759 0.57735 0.57735
+-0.51781 0.255355 0.497052 0.64777
+-0.54671 0.185583 0.408248 0.707107
+-0.566257 0.112635 0.31246 0.754345
+-0.576114 0.0377604 0.211325 0.788675
+0.644115 0.0422175 0.0499525 -0.762127
+0.633094 0.12593 0.149003 -0.749087
+0.611241 0.207488 0.245503 -0.72323
+0.578929 0.285496 0.337804 -0.684998
+0.536711 0.358619 0.424324 -0.635045
+0.485311 0.425606 0.503584 -0.574227
+0.425606 0.485311 0.574227 -0.503584
+0.358619 0.536711 0.635045 -0.424324
+0.285496 0.578929 0.684998 -0.337804
+0.207488 0.611241 0.72323 -0.245503
+0.12593 0.633094 0.749087 -0.149003
+0.0422175 0.644115 0.762127 -0.0499524
+-0.0422176 0.644115 0.762127 0.0499525
+-0.12593 0.633094 0.749087 0.149003
+-0.207488 0.611241 0.72323 0.245504
+-0.285496 0.578929 0.684998 0.337804
+-0.358619 0.536711 0.635045 0.424324
+-0.425606 0.48531 0.574227 0.503584
+-0.485311 0.425606 0.503584 0.574227
+-0.536711 0.358619 0.424324 0.635045
+-0.578929 0.285496 0.337804 0.684998
+-0.611241 0.207488 0.245503 0.72323
+-0.633094 0.12593 0.149003 0.749087
+-0.644115 0.0422174 0.0499523 0.762127
+0.644115 0.0422175 0.337804 -0.684998
+0.633094 0.12593 0.424324 -0.635045
+0.611241 0.207488 0.503584 -0.574227
+0.578929 0.285496 0.574227 -0.503584
+0.536711 0.358619 0.635045 -0.424324
+0.485311 0.425606 0.684998 -0.337804
+0.425606 0.485311 0.72323 -0.245503
+0.358619 0.536711 0.749087 -0.149003
+0.285496 0.578929 0.762127 -0.0499525
+0.207488 0.611241 0.762127 0.0499525
+0.12593 0.633094 0.749087 0.149003
+0.0422175 0.644115 0.72323 0.245503
+-0.0422176 0.644115 0.684998 0.337804
+-0.12593 0.633094 0.635045 0.424324
+-0.207488 0.611241 0.574227 0.503584
+-0.285496 0.578929 0.503584 0.574227
+-0.358619 0.536711 0.424324 0.635045
+-0.425606 0.48531 0.337803 0.684998
+-0.485311 0.425606 0.245503 0.72323
+-0.536711 0.358619 0.149003 0.749087
+-0.578929 0.285496 0.0499524 0.762127
+-0.611241 0.207488 -0.0499527 0.762127
+-0.633094 0.12593 -0.149003 0.749087
+-0.644115 0.0422174 -0.245504 0.72323
+0.705593 0.046247 0.430459 -0.560986
+0.69352 0.13795 0.5 -0.5
+0.669581 0.227292 0.560986 -0.430459
+0.634185 0.312745 0.612372 -0.353553
+0.587938 0.392847 0.653281 -0.270598
+0.531631 0.466228 0.683013 -0.183013
+0.466228 0.531631 0.701057 -0.0922959
+0.392847 0.587938 0.707107 2.98546e-08
+0.312745 0.634185 0.701057 0.092296
+0.227292 0.669581 0.683013 0.183013
+0.13795 0.69352 0.653281 0.270598
+0.046247 0.705593 0.612372 0.353553
+-0.046247 0.705593 0.560986 0.430459
+-0.13795 0.69352 0.5 0.5
+-0.227292 0.669581 0.430459 0.560986
+-0.312745 0.634185 0.353553 0.612373
+-0.392848 0.587938 0.270598 0.653282
+-0.466228 0.531631 0.183013 0.683013
+-0.531631 0.466228 0.0922959 0.701057
+-0.587938 0.392847 -6.07632e-08 0.707107
+-0.634185 0.312745 -0.092296 0.701057
+-0.669581 0.227292 -0.183013 0.683013
+-0.69352 0.13795 -0.270598 0.653281
+-0.705593 0.0462468 -0.353554 0.612372
+0.705593 0.046247 0.183013 -0.683013
+0.69352 0.13795 0.270598 -0.653281
+0.669581 0.227292 0.353553 -0.612372
+0.634185 0.312745 0.430459 -0.560986
+0.587938 0.392847 0.5 -0.5
+0.531631 0.466228 0.560986 -0.430459
+0.466228 0.531631 0.612372 -0.353553
+0.392847 0.587938 0.653281 -0.270598
+0.312745 0.634185 0.683013 -0.183013
+0.227292 0.669581 0.701057 -0.092296
+0.13795 0.69352 0.707107 -2.56385e-08
+0.046247 0.705593 0.701057 0.092296
+-0.046247 0.705593 0.683013 0.183013
+-0.13795 0.69352 0.653281 0.270598
+-0.227292 0.669581 0.612372 0.353553
+-0.312745 0.634185 0.560985 0.430459
+-0.392848 0.587938 0.5 0.5
+-0.466228 0.531631 0.430459 0.560986
+-0.531631 0.466228 0.353553 0.612372
+-0.587938 0.392847 0.270598 0.653282
+-0.634185 0.312745 0.183013 0.683013
+-0.669581 0.227292 0.0922958 0.701057
+-0.69352 0.13795 -1.73857e-07 0.707107
+-0.705593 0.0462468 -0.0922961 0.701057
+0.762127 0.0499525 0.285496 -0.578929
+0.749087 0.149003 0.358619 -0.536711
+0.72323 0.245503 0.425606 -0.485311
+0.684998 0.337804 0.485311 -0.425606
+0.635045 0.424324 0.536711 -0.358619
+0.574227 0.503584 0.578929 -0.285496
+0.503584 0.574227 0.611241 -0.207488
+0.424324 0.635045 0.633094 -0.12593
+0.337804 0.684998 0.644115 -0.0422175
+0.245503 0.72323 0.644115 0.0422175
+0.149003 0.749087 0.633094 0.12593
+0.0499524 0.762127 0.611241 0.207488
+-0.0499525 0.762127 0.578929 0.285496
+-0.149003 0.749087 0.536711 0.358619
+-0.245504 0.72323 0.48531 0.425606
+-0.337804 0.684998 0.425606 0.485311
+-0.424324 0.635045 0.358619 0.536711
+-0.503584 0.574227 0.285496 0.578929
+-0.574227 0.503584 0.207488 0.611241
+-0.635045 0.424324 0.12593 0.633094
+-0.684998 0.337804 0.0422175 0.644115
+-0.72323 0.245503 -0.0422177 0.644115
+-0.749087 0.149003 -0.12593 0.633094
+-0.762127 0.0499523 -0.207488 0.611241
+0.644115 0.0422175 -0.245503 -0.72323
+0.633094 0.12593 -0.149003 -0.749087
+0.611241 0.207488 -0.0499525 -0.762127
+0.578929 0.285496 0.0499525 -0.762127
+0.536711 0.358619 0.149003 -0.749087
+0.485311 0.425606 0.245503 -0.72323
+0.425606 0.485311 0.337804 -0.684998
+0.358619 0.536711 0.424324 -0.635045
+0.285496 0.578929 0.503584 -0.574227
+0.207488 0.611241 0.574227 -0.503584
+0.12593 0.633094 0.635045 -0.424324
+0.0422175 0.644115 0.684998 -0.337804
+-0.0422176 0.644115 0.72323 -0.245503
+-0.12593 0.633094 0.749087 -0.149003
+-0.207488 0.611241 0.762127 -0.0499524
+-0.285496 0.578929 0.762127 0.0499526
+-0.358619 0.536711 0.749087 0.149003
+-0.425606 0.48531 0.72323 0.245504
+-0.485311 0.425606 0.684998 0.337804
+-0.536711 0.358619 0.635045 0.424324
+-0.578929 0.285496 0.574227 0.503584
+-0.611241 0.207488 0.503584 0.574227
+-0.633094 0.12593 0.424324 0.635046
+-0.644115 0.0422174 0.337803 0.684998
+0.705593 0.046247 -0.092296 -0.701057
+0.69352 0.13795 3.86358e-09 -0.707107
+0.669581 0.227292 0.092296 -0.701057
+0.634185 0.312745 0.183013 -0.683013
+0.587938 0.392847 0.270598 -0.653281
+0.531631 0.466228 0.353553 -0.612372
+0.466228 0.531631 0.430459 -0.560986
+0.392847 0.587938 0.5 -0.5
+0.312745 0.634185 0.560986 -0.430459
+0.227292 0.669581 0.612372 -0.353553
+0.13795 0.69352 0.653281 -0.270598
+0.046247 0.705593 0.683013 -0.183013
+-0.046247 0.705593 0.701057 -0.0922959
+-0.13795 0.69352 0.707107 3.16206e-09
+-0.227292 0.669581 0.701057 0.092296
+-0.312745 0.634185 0.683013 0.183013
+-0.392848 0.587938 0.653281 0.270598
+-0.466228 0.531631 0.612372 0.353553
+-0.531631 0.466228 0.560985 0.430459
+-0.587938 0.392847 0.5 0.5
+-0.634185 0.312745 0.430459 0.560986
+-0.669581 0.227292 0.353553 0.612373
+-0.69352 0.13795 0.270598 0.653282
+-0.705593 0.0462468 0.183013 0.683013
+0.705593 0.046247 -0.353553 -0.612372
+0.69352 0.13795 -0.270598 -0.653281
+0.669581 0.227292 -0.183013 -0.683013
+0.634185 0.312745 -0.0922959 -0.701057
+0.587938 0.392847 1.05402e-09 -0.707107
+0.531631 0.466228 0.0922959 -0.701057
+0.466228 0.531631 0.183013 -0.683013
+0.392847 0.587938 0.270598 -0.653281
+0.312745 0.634185 0.353553 -0.612372
+0.227292 0.669581 0.430459 -0.560986
+0.13795 0.69352 0.5 -0.5
+0.046247 0.705593 0.560986 -0.430459
+-0.046247 0.705593 0.612372 -0.353553
+-0.13795 0.69352 0.653281 -0.270598
+-0.227292 0.669581 0.683013 -0.183013
+-0.312745 0.634185 0.701057 -0.0922958
+-0.392848 0.587938 0.707107 1.16256e-07
+-0.466228 0.531631 0.701057 0.0922961
+-0.531631 0.466228 0.683013 0.183013
+-0.587938 0.392847 0.653281 0.270598
+-0.634185 0.312745 0.612372 0.353553
+-0.669581 0.227292 0.560985 0.430459
+-0.69352 0.13795 0.5 0.5
+-0.705593 0.0462468 0.430459 0.560986
+0.762127 0.0499525 -0.207488 -0.611241
+0.749087 0.149003 -0.12593 -0.633094
+0.72323 0.245503 -0.0422175 -0.644115
+0.684998 0.337804 0.0422176 -0.644115
+0.635045 0.424324 0.12593 -0.633094
+0.574227 0.503584 0.207488 -0.611241
+0.503584 0.574227 0.285496 -0.578929
+0.424324 0.635045 0.358619 -0.536711
+0.337804 0.684998 0.425606 -0.485311
+0.245503 0.72323 0.485311 -0.425606
+0.149003 0.749087 0.536711 -0.358619
+0.0499524 0.762127 0.578929 -0.285496
+-0.0499525 0.762127 0.611241 -0.207488
+-0.149003 0.749087 0.633094 -0.12593
+-0.245504 0.72323 0.644115 -0.0422175
+-0.337804 0.684998 0.644115 0.0422177
+-0.424324 0.635045 0.633094 0.12593
+-0.503584 0.574227 0.611241 0.207488
+-0.574227 0.503584 0.578929 0.285496
+-0.635045 0.424324 0.536711 0.358619
+-0.684998 0.337804 0.48531 0.425606
+-0.72323 0.245503 0.425606 0.485311
+-0.749087 0.149003 0.358619 0.536711
+-0.762127 0.0499523 0.285496 0.578929
+0.762127 0.0499525 0.0422175 -0.644115
+0.749087 0.149003 0.12593 -0.633094
+0.72323 0.245503 0.207488 -0.611241
+0.684998 0.337804 0.285496 -0.578929
+0.635045 0.424324 0.358619 -0.536711
+0.574227 0.503584 0.425606 -0.485311
+0.503584 0.574227 0.485311 -0.425606
+0.424324 0.635045 0.536711 -0.358619
+0.337804 0.684998 0.578929 -0.285496
+0.245503 0.72323 0.611241 -0.207488
+0.149003 0.749087 0.633094 -0.12593
+0.0499524 0.762127 0.644115 -0.0422175
+-0.0499525 0.762127 0.644115 0.0422176
+-0.149003 0.749087 0.633094 0.12593
+-0.245504 0.72323 0.611241 0.207488
+-0.337804 0.684998 0.578929 0.285496
+-0.424324 0.635045 0.536711 0.358619
+-0.503584 0.574227 0.48531 0.425606
+-0.574227 0.503584 0.425606 0.485311
+-0.635045 0.424324 0.358619 0.536711
+-0.684998 0.337804 0.285496 0.578929
+-0.72323 0.245503 0.207488 0.611241
+-0.749087 0.149003 0.12593 0.633094
+-0.762127 0.0499523 0.0422174 0.644115
+0.814748 0.0534014 0.149429 -0.557678
+0.800808 0.159291 0.220942 -0.533402
+0.773165 0.262454 0.288675 -0.5
+0.732294 0.361127 0.351469 -0.458043
+0.678892 0.453621 0.408248 -0.408248
+0.613875 0.538354 0.458043 -0.351469
+0.538354 0.613875 0.5 -0.288675
+0.453621 0.678892 0.533402 -0.220942
+0.361127 0.732294 0.557678 -0.149429
+0.262454 0.773165 0.572411 -0.0753593
+0.159291 0.800808 0.57735 -2.09338e-08
+0.0534014 0.814748 0.572411 0.0753594
+-0.0534015 0.814748 0.557678 0.149429
+-0.159291 0.800808 0.533402 0.220942
+-0.262454 0.773165 0.5 0.288675
+-0.361127 0.732293 0.458043 0.351469
+-0.453621 0.678892 0.408248 0.408248
+-0.538354 0.613875 0.351469 0.458043
+-0.613875 0.538354 0.288675 0.5
+-0.678892 0.453621 0.220942 0.533402
+-0.732294 0.361127 0.149429 0.557678
+-0.773165 0.262454 0.0753592 0.572411
+-0.800808 0.15929 -1.41954e-07 0.57735
+-0.814748 0.0534013 -0.0753595 0.572411
+0.814748 0.0534014 -0.0753593 -0.572411
+0.800808 0.159291 3.1546e-09 -0.57735
+0.773165 0.262454 0.0753593 -0.572411
+0.732294 0.361127 0.149429 -0.557678
+0.678892 0.453621 0.220942 -0.533402
+0.613875 0.538354 0.288675 -0.5
+0.538354 0.613875 0.351469 -0.458043
+0.453621 0.678892 0.408248 -0.408248
+0.361127 0.732294 0.458043 -0.351469
+0.262454 0.773165 0.5 -0.288675
+0.159291 0.800808 0.533402 -0.220942
+0.0534014 0.814748 0.557678 -0.149429
+-0.0534015 0.814748 0.572411 -0.0753593
+-0.159291 0.800808 0.57735 2.58181e-09
+-0.262454 0.773165 0.572411 0.0753594
+-0.361127 0.732293 0.557678 0.149429
+-0.453621 0.678892 0.533402 0.220942
+-0.538354 0.613875 0.5 0.288675
+-0.613875 0.538354 0.458043 0.351469
+-0.678892 0.453621 0.408248 0.408248
+-0.732294 0.361127 0.351469 0.458043
+-0.773165 0.262454 0.288675 0.5
+-0.800808 0.15929 0.220942 0.533402
+-0.814748 0.0534013 0.149429 0.557678
+0.864171 0.0566408 0.0327016 -0.498929
+0.849385 0.168953 0.0975452 -0.490393
+0.820066 0.278375 0.16072 -0.473465
+0.776715 0.383033 0.221144 -0.448436
+0.720074 0.481138 0.277785 -0.415735
+0.651112 0.57101 0.329673 -0.37592
+0.57101 0.651112 0.37592 -0.329673
+0.481138 0.720074 0.415735 -0.277785
+0.383033 0.776715 0.448436 -0.221144
+0.278375 0.820066 0.473465 -0.16072
+0.168953 0.849385 0.490393 -0.0975452
+0.0566407 0.864171 0.498929 -0.0327015
+-0.0566408 0.864171 0.498929 0.0327016
+-0.168953 0.849385 0.490393 0.0975452
+-0.278375 0.820066 0.473465 0.16072
+-0.383033 0.776715 0.448436 0.221144
+-0.481138 0.720074 0.415735 0.277785
+-0.57101 0.651112 0.37592 0.329673
+-0.651112 0.57101 0.329673 0.37592
+-0.720074 0.481138 0.277785 0.415735
+-0.776715 0.383033 0.221144 0.448436
+-0.820066 0.278375 0.16072 0.473465
+-0.849385 0.168953 0.097545 0.490393
+-0.864171 0.0566406 0.0327015 0.498929
+0.101844 0.00667518 0.655903 0.747914
+0.100101 0.0199113 0.552669 0.827128
+0.0966457 0.0328068 0.439979 0.892189
+0.0915367 0.0451409 0.319761 0.941985
+0.0848615 0.0567026 0.194072 0.975664
+0.0767343 0.0672942 0.0650616 0.992648
+0.0672942 0.0767343 -0.0650616 0.992648
+0.0567026 0.0848615 -0.194072 0.975664
+0.0451409 0.0915367 -0.319761 0.941985
+0.0328068 0.0966457 -0.439979 0.892189
+0.0199113 0.100101 -0.552669 0.827128
+0.00667517 0.101844 -0.655903 0.747914
+-0.00667518 0.101844 -0.747914 0.655903
+-0.0199113 0.100101 -0.827128 0.552669
+-0.0328068 0.0966456 -0.892189 0.439979
+-0.0451409 0.0915367 -0.941985 0.319761
+-0.0567027 0.0848615 -0.975664 0.194071
+-0.0672942 0.0767343 -0.992648 0.0650615
+-0.0767343 0.0672942 -0.992648 -0.0650617
+-0.0848615 0.0567026 -0.975664 -0.194072
+-0.0915367 0.0451409 -0.941985 -0.319761
+-0.0966457 0.0328068 -0.892189 -0.439979
+-0.100101 0.0199113 -0.827128 -0.552669
+-0.101844 0.00667516 -0.747914 -0.655903
+0.203687 0.0133504 0.314672 0.926993
+0.200202 0.0398226 0.190983 0.960135
+0.193291 0.0656136 0.0640261 0.976849
+0.183073 0.0902818 -0.0640261 0.976849
+0.169723 0.113405 -0.190983 0.960135
+0.153469 0.134588 -0.314672 0.926993
+0.134588 0.153469 -0.432976 0.877989
+0.113405 0.169723 -0.543873 0.813963
+0.0902818 0.183073 -0.645463 0.73601
+0.0656136 0.193291 -0.73601 0.645463
+0.0398227 0.200202 -0.813963 0.543873
+0.0133503 0.203687 -0.877989 0.432976
+-0.0133504 0.203687 -0.926993 0.314672
+-0.0398226 0.200202 -0.960135 0.190983
+-0.0656136 0.193291 -0.976849 0.064026
+-0.0902818 0.183073 -0.976849 -0.0640263
+-0.113405 0.169723 -0.960135 -0.190983
+-0.134588 0.153469 -0.926992 -0.314672
+-0.153469 0.134588 -0.877989 -0.432976
+-0.169723 0.113405 -0.813963 -0.543873
+-0.183073 0.0902818 -0.73601 -0.645463
+-0.193291 0.0656135 -0.645463 -0.73601
+-0.200202 0.0398226 -0.543873 -0.813963
+-0.203687 0.0133503 -0.432976 -0.877989
+0.203687 0.0133504 0.877989 0.432976
+0.200202 0.0398226 0.813963 0.543873
+0.193291 0.0656136 0.73601 0.645463
+0.183073 0.0902818 0.645463 0.73601
+0.169723 0.113405 0.543873 0.813963
+0.153469 0.134588 0.432976 0.877989
+0.134588 0.153469 0.314672 0.926993
+0.113405 0.169723 0.190983 0.960135
+0.0902818 0.183073 0.0640261 0.976849
+0.0656136 0.193291 -0.0640261 0.976849
+0.0398227 0.200202 -0.190983 0.960135
+0.0133503 0.203687 -0.314672 0.926993
+-0.0133504 0.203687 -0.432976 0.877989
+-0.0398226 0.200202 -0.543873 0.813963
+-0.0656136 0.193291 -0.645463 0.73601
+-0.0902818 0.183073 -0.73601 0.645463
+-0.113405 0.169723 -0.813963 0.543873
+-0.134588 0.153469 -0.877989 0.432976
+-0.153469 0.134588 -0.926993 0.314671
+-0.169723 0.113405 -0.960135 0.190983
+-0.183073 0.0902818 -0.976849 0.064026
+-0.193291 0.0656135 -0.976849 -0.0640263
+-0.200202 0.0398226 -0.960135 -0.190983
+-0.203687 0.0133503 -0.926992 -0.314672
+0.305531 0.0200255 0.627679 0.71573
+0.300303 0.059734 0.528887 0.791535
+0.289937 0.0984203 0.421046 0.853797
+0.27461 0.135423 0.306001 0.901451
+0.254585 0.170108 0.18572 0.93368
+0.230203 0.201883 0.0622619 0.949933
+0.201883 0.230203 -0.0622619 0.949933
+0.170108 0.254585 -0.18572 0.93368
+0.135423 0.27461 -0.306001 0.901451
+0.0984203 0.289937 -0.421046 0.853797
+0.059734 0.300303 -0.528887 0.791536
+0.0200255 0.305531 -0.627679 0.71573
+-0.0200255 0.305531 -0.71573 0.627678
+-0.059734 0.300303 -0.791535 0.528887
+-0.0984204 0.289937 -0.853797 0.421046
+-0.135423 0.27461 -0.901451 0.306001
+-0.170108 0.254585 -0.93368 0.18572
+-0.201883 0.230203 -0.949933 0.0622618
+-0.230203 0.201883 -0.949933 -0.062262
+-0.254585 0.170108 -0.93368 -0.185721
+-0.27461 0.135423 -0.901451 -0.306001
+-0.289937 0.0984203 -0.853797 -0.421047
+-0.300303 0.0597339 -0.791535 -0.528887
+-0.305531 0.0200255 -0.71573 -0.627679
+0.305531 0.0200255 0.18572 0.93368
+0.300303 0.059734 0.0622619 0.949933
+0.289937 0.0984203 -0.0622619 0.949933
+0.27461 0.135423 -0.18572 0.93368
+0.254585 0.170108 -0.306001 0.901451
+0.230203 0.201883 -0.421046 0.853797
+0.201883 0.230203 -0.528887 0.791535
+0.170108 0.254585 -0.627679 0.71573
+0.135423 0.27461 -0.71573 0.627679
+0.0984203 0.289937 -0.791535 0.528887
+0.059734 0.300303 -0.853797 0.421046
+0.0200255 0.305531 -0.901451 0.306001
+-0.0200255 0.305531 -0.93368 0.18572
+-0.059734 0.300303 -0.949933 0.0622619
+-0.0984204 0.289937 -0.949933 -0.062262
+-0.135423 0.27461 -0.93368 -0.185721
+-0.170108 0.254585 -0.901451 -0.306001
+-0.201883 0.230203 -0.853797 -0.421046
+-0.230203 0.201883 -0.791535 -0.528887
+-0.254585 0.170108 -0.71573 -0.627679
+-0.27461 0.135423 -0.627678 -0.71573
+-0.289937 0.0984203 -0.528887 -0.791536
+-0.300303 0.0597339 -0.421046 -0.853798
+-0.305531 0.0200255 -0.306001 -0.901451
+0.407374 0.0267007 0.119154 0.905061
+0.400404 0.0796453 -4.98786e-09 0.912871
+0.386583 0.131227 -0.119154 0.905061
+0.366147 0.180564 -0.236268 0.881766
+0.339446 0.226811 -0.349341 0.843383
+0.306937 0.269177 -0.456435 0.790569
+0.269177 0.306937 -0.555721 0.724229
+0.226811 0.339446 -0.645497 0.645497
+0.180564 0.366147 -0.724229 0.555721
+0.131227 0.386583 -0.790569 0.456435
+0.0796453 0.400404 -0.843383 0.349341
+0.0267007 0.407374 -0.881766 0.236268
+-0.0267007 0.407374 -0.905061 0.119154
+-0.0796453 0.400404 -0.912871 -4.0822e-09
+-0.131227 0.386583 -0.905061 -0.119154
+-0.180564 0.366147 -0.881766 -0.236269
+-0.226811 0.339446 -0.843383 -0.349341
+-0.269177 0.306937 -0.790569 -0.456436
+-0.306937 0.269177 -0.724229 -0.555721
+-0.339446 0.226811 -0.645497 -0.645497
+-0.366147 0.180564 -0.555721 -0.724229
+-0.386583 0.131227 -0.456435 -0.79057
+-0.400404 0.0796452 -0.34934 -0.843383
+-0.407374 0.0267006 -0.236268 -0.881766
+0.407374 0.0267007 0.456435 0.790569
+0.400404 0.0796453 0.349341 0.843383
+0.386583 0.131227 0.236268 0.881766
+0.366147 0.180564 0.119154 0.905061
+0.339446 0.226811 -1.36073e-09 0.912871
+0.306937 0.269177 -0.119154 0.905061
+0.269177 0.306937 -0.236268 0.881766
+0.226811 0.339446 -0.349341 0.843383
+0.180564 0.366147 -0.456435 0.790569
+0.131227 0.386583 -0.555721 0.724229
+0.0796453 0.400404 -0.645497 0.645497
+0.0267007 0.407374 -0.724229 0.555721
+-0.0267007 0.407374 -0.790569 0.456435
+-0.0796453 0.400404 -0.843383 0.349341
+-0.131227 0.386583 -0.881766 0.236268
+-0.180564 0.366147 -0.905061 0.119153
+-0.226811 0.339446 -0.912871 -1.50086e-07
+-0.269177 0.306937 -0.905061 -0.119154
+-0.306937 0.269177 -0.881766 -0.236268
+-0.339446 0.226811 -0.843383 -0.349341
+-0.366147 0.180564 -0.790569 -0.456436
+-0.386583 0.131227 -0.724229 -0.555721
+-0.400404 0.0796452 -0.645497 -0.645497
+-0.407374 0.0267006 -0.55572 -0.724229
+0.498929 0.0327016 0.278375 0.820066
+0.490393 0.0975452 0.168953 0.849385
+0.473465 0.16072 0.0566408 0.864171
+0.448436 0.221144 -0.0566408 0.864171
+0.415735 0.277785 -0.168953 0.849385
+0.37592 0.329673 -0.278375 0.820066
+0.329673 0.37592 -0.383033 0.776715
+0.277785 0.415735 -0.481138 0.720074
+0.221144 0.448436 -0.57101 0.651112
+0.16072 0.473465 -0.651112 0.57101
+0.0975452 0.490393 -0.720074 0.481138
+0.0327015 0.498929 -0.776715 0.383033
+-0.0327016 0.498929 -0.820066 0.278375
+-0.0975452 0.490393 -0.849385 0.168953
+-0.16072 0.473465 -0.864171 0.0566407
+-0.221144 0.448436 -0.864171 -0.0566409
+-0.277785 0.415735 -0.849385 -0.168953
+-0.329673 0.37592 -0.820066 -0.278375
+-0.37592 0.329673 -0.776715 -0.383033
+-0.415735 0.277785 -0.720074 -0.481138
+-0.448436 0.221144 -0.651112 -0.57101
+-0.473465 0.16072 -0.57101 -0.651113
+-0.490393 0.097545 -0.481138 -0.720074
+-0.498929 0.0327015 -0.383033 -0.776715
+0.305531 0.0200255 0.901451 0.306001
+0.300303 0.059734 0.853797 0.421046
+0.289937 0.0984203 0.791535 0.528887
+0.27461 0.135423 0.71573 0.627679
+0.254585 0.170108 0.627679 0.71573
+0.230203 0.201883 0.528887 0.791535
+0.201883 0.230203 0.421046 0.853797
+0.170108 0.254585 0.306001 0.901451
+0.135423 0.27461 0.18572 0.93368
+0.0984203 0.289937 0.0622619 0.949933
+0.059734 0.300303 -0.0622619 0.949933
+0.0200255 0.305531 -0.185721 0.93368
+-0.0200255 0.305531 -0.306001 0.901451
+-0.059734 0.300303 -0.421046 0.853797
+-0.0984204 0.289937 -0.528887 0.791535
+-0.135423 0.27461 -0.627679 0.71573
+-0.170108 0.254585 -0.71573 0.627678
+-0.201883 0.230203 -0.791536 0.528887
+-0.230203 0.201883 -0.853797 0.421046
+-0.254585 0.170108 -0.901451 0.306001
+-0.27461 0.135423 -0.93368 0.18572
+-0.289937 0.0984203 -0.949933 0.0622617
+-0.300303 0.0597339 -0.949933 -0.0622622
+-0.305531 0.0200255 -0.93368 -0.185721
+0.407374 0.0267007 0.724229 0.555721
+0.400404 0.0796453 0.645497 0.645497
+0.386583 0.131227 0.555721 0.724229
+0.366147 0.180564 0.456435 0.790569
+0.339446 0.226811 0.349341 0.843383
+0.306937 0.269177 0.236268 0.881766
+0.269177 0.306937 0.119154 0.905061
+0.226811 0.339446 -3.85421e-08 0.912871
+0.180564 0.366147 -0.119154 0.905061
+0.131227 0.386583 -0.236268 0.881766
+0.0796453 0.400404 -0.349341 0.843383
+0.0267007 0.407374 -0.456436 0.790569
+-0.0267007 0.407374 -0.555721 0.724229
+-0.0796453 0.400404 -0.645497 0.645497
+-0.131227 0.386583 -0.724229 0.555721
+-0.180564 0.366147 -0.79057 0.456435
+-0.226811 0.339446 -0.843383 0.34934
+-0.269177 0.306937 -0.881766 0.236268
+-0.306937 0.269177 -0.905061 0.119153
+-0.339446 0.226811 -0.912871 -7.8445e-08
+-0.366147 0.180564 -0.905061 -0.119154
+-0.386583 0.131227 -0.881766 -0.236269
+-0.400404 0.0796452 -0.843383 -0.349341
+-0.407374 0.0267006 -0.790569 -0.456436
+0.407374 0.0267007 0.881766 0.236268
+0.400404 0.0796453 0.843383 0.349341
+0.386583 0.131227 0.790569 0.456435
+0.366147 0.180564 0.724229 0.555721
+0.339446 0.226811 0.645497 0.645497
+0.306937 0.269177 0.555721 0.724229
+0.269177 0.306937 0.456435 0.790569
+0.226811 0.339446 0.349341 0.843383
+0.180564 0.366147 0.236268 0.881766
+0.131227 0.386583 0.119154 0.905061
+0.0796453 0.400404 3.30992e-08 0.912871
+0.0267007 0.407374 -0.119154 0.905061
+-0.0267007 0.407374 -0.236268 0.881766
+-0.0796453 0.400404 -0.349341 0.843383
+-0.131227 0.386583 -0.456436 0.790569
+-0.180564 0.366147 -0.555721 0.724229
+-0.226811 0.339446 -0.645497 0.645497
+-0.269177 0.306937 -0.724229 0.555721
+-0.306937 0.269177 -0.790569 0.456435
+-0.339446 0.226811 -0.843383 0.349341
+-0.366147 0.180564 -0.881766 0.236268
+-0.386583 0.131227 -0.905061 0.119153
+-0.400404 0.0796452 -0.912871 -2.24449e-07
+-0.407374 0.0267006 -0.905061 -0.119154
+0.498929 0.0327016 0.776715 0.383033
+0.490393 0.0975452 0.720074 0.481138
+0.473465 0.16072 0.651112 0.57101
+0.448436 0.221144 0.57101 0.651112
+0.415735 0.277785 0.481138 0.720074
+0.37592 0.329673 0.383033 0.776715
+0.329673 0.37592 0.278375 0.820066
+0.277785 0.415735 0.168953 0.849385
+0.221144 0.448436 0.0566408 0.864171
+0.16072 0.473465 -0.0566408 0.864171
+0.0975452 0.490393 -0.168953 0.849385
+0.0327015 0.498929 -0.278375 0.820066
+-0.0327016 0.498929 -0.383033 0.776715
+-0.0975452 0.490393 -0.481138 0.720074
+-0.16072 0.473465 -0.57101 0.651112
+-0.221144 0.448436 -0.651112 0.57101
+-0.277785 0.415735 -0.720074 0.481138
+-0.329673 0.37592 -0.776715 0.383033
+-0.37592 0.329673 -0.820066 0.278375
+-0.415735 0.277785 -0.849385 0.168953
+-0.448436 0.221144 -0.864171 0.0566407
+-0.473465 0.16072 -0.864171 -0.056641
+-0.490393 0.097545 -0.849385 -0.168953
+-0.498929 0.0327015 -0.820065 -0.278375
+0.498929 0.0327016 0.57101 0.651112
+0.490393 0.0975452 0.481138 0.720074
+0.473465 0.16072 0.383033 0.776715
+0.448436 0.221144 0.278375 0.820066
+0.415735 0.277785 0.168953 0.849385
+0.37592 0.329673 0.0566408 0.864171
+0.329673 0.37592 -0.0566408 0.864171
+0.277785 0.415735 -0.168953 0.849385
+0.221144 0.448436 -0.278375 0.820066
+0.16072 0.473465 -0.383033 0.776715
+0.0975452 0.490393 -0.481138 0.720074
+0.0327015 0.498929 -0.57101 0.651112
+-0.0327016 0.498929 -0.651112 0.57101
+-0.0975452 0.490393 -0.720074 0.481138
+-0.16072 0.473465 -0.776715 0.383033
+-0.221144 0.448436 -0.820066 0.278375
+-0.277785 0.415735 -0.849385 0.168953
+-0.329673 0.37592 -0.864171 0.0566407
+-0.37592 0.329673 -0.864171 -0.0566409
+-0.415735 0.277785 -0.849385 -0.168953
+-0.448436 0.221144 -0.820066 -0.278375
+-0.473465 0.16072 -0.776714 -0.383033
+-0.490393 0.097545 -0.720074 -0.481138
+-0.498929 0.0327015 -0.651112 -0.57101
+0.576114 0.0377605 0.408248 0.707107
+0.566257 0.112635 0.31246 0.754344
+0.54671 0.185583 0.211325 0.788675
+0.51781 0.255356 0.106574 0.809511
+0.480049 0.320759 -1.21708e-09 0.816497
+0.434075 0.380673 -0.106574 0.809511
+0.380673 0.434075 -0.211325 0.788675
+0.320759 0.480049 -0.31246 0.754344
+0.255355 0.51781 -0.408248 0.707107
+0.185583 0.54671 -0.497052 0.64777
+0.112635 0.566257 -0.57735 0.57735
+0.0377605 0.576114 -0.64777 0.497052
+-0.0377605 0.576114 -0.707107 0.408248
+-0.112635 0.566257 -0.754344 0.31246
+-0.185583 0.54671 -0.788675 0.211325
+-0.255356 0.51781 -0.809511 0.106574
+-0.320759 0.480049 -0.816497 -1.34241e-07
+-0.380674 0.434075 -0.809511 -0.106574
+-0.434075 0.380673 -0.788675 -0.211325
+-0.480049 0.320759 -0.754344 -0.31246
+-0.51781 0.255355 -0.707107 -0.408248
+-0.54671 0.185583 -0.64777 -0.497052
+-0.566257 0.112635 -0.57735 -0.57735
+-0.576114 0.0377604 -0.497051 -0.64777
+0.576114 0.0377605 0.64777 0.497052
+0.566257 0.112635 0.57735 0.57735
+0.54671 0.185583 0.497052 0.64777
+0.51781 0.255356 0.408248 0.707107
+0.480049 0.320759 0.31246 0.754344
+0.434075 0.380673 0.211325 0.788675
+0.380673 0.434075 0.106574 0.809511
+0.320759 0.480049 -3.44731e-08 0.816497
+0.255355 0.51781 -0.106574 0.809511
+0.185583 0.54671 -0.211325 0.788675
+0.112635 0.566257 -0.31246 0.754344
+0.0377605 0.576114 -0.408248 0.707107
+-0.0377605 0.576114 -0.497052 0.64777
+-0.112635 0.566257 -0.57735 0.57735
+-0.185583 0.54671 -0.64777 0.497052
+-0.255356 0.51781 -0.707107 0.408248
+-0.320759 0.480049 -0.754345 0.31246
+-0.380674 0.434075 -0.788675 0.211325
+-0.434075 0.380673 -0.809511 0.106574
+-0.480049 0.320759 -0.816497 -7.01633e-08
+-0.51781 0.255355 -0.809511 -0.106574
+-0.54671 0.185583 -0.788675 -0.211325
+-0.566257 0.112635 -0.754344 -0.31246
+-0.576114 0.0377604 -0.707107 -0.408248
+0.644115 0.0422175 0.503584 0.574227
+0.633094 0.12593 0.424324 0.635045
+0.611241 0.207488 0.337804 0.684998
+0.578929 0.285496 0.245503 0.72323
+0.536711 0.358619 0.149003 0.749087
+0.485311 0.425606 0.0499525 0.762127
+0.425606 0.485311 -0.0499525 0.762127
+0.358619 0.536711 -0.149003 0.749087
+0.285496 0.578929 -0.245503 0.72323
+0.207488 0.611241 -0.337804 0.684998
+0.12593 0.633094 -0.424324 0.635045
+0.0422175 0.644115 -0.503584 0.574227
+-0.0422176 0.644115 -0.574227 0.503584
+-0.12593 0.633094 -0.635045 0.424324
+-0.207488 0.611241 -0.684998 0.337804
+-0.285496 0.578929 -0.72323 0.245503
+-0.358619 0.536711 -0.749087 0.149003
+-0.425606 0.48531 -0.762127 0.0499524
+-0.485311 0.425606 -0.762127 -0.0499526
+-0.536711 0.358619 -0.749087 -0.149003
+-0.578929 0.285496 -0.72323 -0.245503
+-0.611241 0.207488 -0.684998 -0.337804
+-0.633094 0.12593 -0.635045 -0.424324
+-0.644115 0.0422174 -0.574227 -0.503584
+0.101844 0.00667518 -0.747914 0.655903
+0.100101 0.0199113 -0.827128 0.552669
+0.0966457 0.0328068 -0.892189 0.439979
+0.0915367 0.0451409 -0.941985 0.319761
+0.0848615 0.0567026 -0.975664 0.194072
+0.0767343 0.0672942 -0.992648 0.0650616
+0.0672942 0.0767343 -0.992648 -0.0650616
+0.0567026 0.0848615 -0.975664 -0.194072
+0.0451409 0.0915367 -0.941985 -0.319761
+0.0328068 0.0966457 -0.892189 -0.439979
+0.0199113 0.100101 -0.827128 -0.552669
+0.00667517 0.101844 -0.747914 -0.655903
+-0.00667518 0.101844 -0.655903 -0.747914
+-0.0199113 0.100101 -0.552669 -0.827128
+-0.0328068 0.0966456 -0.439979 -0.892189
+-0.0451409 0.0915367 -0.319761 -0.941985
+-0.0567027 0.0848615 -0.194071 -0.975664
+-0.0672942 0.0767343 -0.0650615 -0.992648
+-0.0767343 0.0672942 0.0650617 -0.992648
+-0.0848615 0.0567026 0.194072 -0.975664
+-0.0915367 0.0451409 0.319761 -0.941985
+-0.0966457 0.0328068 0.439979 -0.892189
+-0.100101 0.0199113 0.552669 -0.827128
+-0.101844 0.00667516 0.655903 -0.747914
+0.203687 0.0133504 -0.926993 0.314672
+0.200202 0.0398226 -0.960135 0.190983
+0.193291 0.0656136 -0.976849 0.0640261
+0.183073 0.0902818 -0.976849 -0.0640261
+0.169723 0.113405 -0.960135 -0.190983
+0.153469 0.134588 -0.926993 -0.314672
+0.134588 0.153469 -0.877989 -0.432976
+0.113405 0.169723 -0.813963 -0.543873
+0.0902818 0.183073 -0.73601 -0.645463
+0.0656136 0.193291 -0.645463 -0.73601
+0.0398227 0.200202 -0.543873 -0.813963
+0.0133503 0.203687 -0.432976 -0.877989
+-0.0133504 0.203687 -0.314672 -0.926993
+-0.0398226 0.200202 -0.190983 -0.960135
+-0.0656136 0.193291 -0.064026 -0.976849
+-0.0902818 0.183073 0.0640263 -0.976849
+-0.113405 0.169723 0.190983 -0.960135
+-0.134588 0.153469 0.314672 -0.926992
+-0.153469 0.134588 0.432976 -0.877989
+-0.169723 0.113405 0.543873 -0.813963
+-0.183073 0.0902818 0.645463 -0.73601
+-0.193291 0.0656135 0.73601 -0.645463
+-0.200202 0.0398226 0.813963 -0.543873
+-0.203687 0.0133503 0.877989 -0.432976
+0.203687 0.0133504 -0.432976 0.877989
+0.200202 0.0398226 -0.543873 0.813963
+0.193291 0.0656136 -0.645463 0.73601
+0.183073 0.0902818 -0.73601 0.645463
+0.169723 0.113405 -0.813963 0.543873
+0.153469 0.134588 -0.877989 0.432976
+0.134588 0.153469 -0.926993 0.314672
+0.113405 0.169723 -0.960135 0.190983
+0.0902818 0.183073 -0.976849 0.0640261
+0.0656136 0.193291 -0.976849 -0.0640261
+0.0398227 0.200202 -0.960135 -0.190983
+0.0133503 0.203687 -0.926993 -0.314672
+-0.0133504 0.203687 -0.877989 -0.432976
+-0.0398226 0.200202 -0.813963 -0.543873
+-0.0656136 0.193291 -0.73601 -0.645463
+-0.0902818 0.183073 -0.645463 -0.73601
+-0.113405 0.169723 -0.543873 -0.813963
+-0.134588 0.153469 -0.432976 -0.877989
+-0.153469 0.134588 -0.314671 -0.926993
+-0.169723 0.113405 -0.190983 -0.960135
+-0.183073 0.0902818 -0.064026 -0.976849
+-0.193291 0.0656135 0.0640263 -0.976849
+-0.200202 0.0398226 0.190983 -0.960135
+-0.203687 0.0133503 0.314672 -0.926992
+0.305531 0.0200255 -0.71573 0.627679
+0.300303 0.059734 -0.791535 0.528887
+0.289937 0.0984203 -0.853797 0.421046
+0.27461 0.135423 -0.901451 0.306001
+0.254585 0.170108 -0.93368 0.18572
+0.230203 0.201883 -0.949933 0.0622619
+0.201883 0.230203 -0.949933 -0.0622619
+0.170108 0.254585 -0.93368 -0.18572
+0.135423 0.27461 -0.901451 -0.306001
+0.0984203 0.289937 -0.853797 -0.421046
+0.059734 0.300303 -0.791536 -0.528887
+0.0200255 0.305531 -0.71573 -0.627679
+-0.0200255 0.305531 -0.627678 -0.71573
+-0.059734 0.300303 -0.528887 -0.791535
+-0.0984204 0.289937 -0.421046 -0.853797
+-0.135423 0.27461 -0.306001 -0.901451
+-0.170108 0.254585 -0.18572 -0.93368
+-0.201883 0.230203 -0.0622618 -0.949933
+-0.230203 0.201883 0.062262 -0.949933
+-0.254585 0.170108 0.185721 -0.93368
+-0.27461 0.135423 0.306001 -0.901451
+-0.289937 0.0984203 0.421047 -0.853797
+-0.300303 0.0597339 0.528887 -0.791535
+-0.305531 0.0200255 0.627679 -0.71573
+0.305531 0.0200255 -0.93368 0.18572
+0.300303 0.059734 -0.949933 0.0622619
+0.289937 0.0984203 -0.949933 -0.0622619
+0.27461 0.135423 -0.93368 -0.18572
+0.254585 0.170108 -0.901451 -0.306001
+0.230203 0.201883 -0.853797 -0.421046
+0.201883 0.230203 -0.791535 -0.528887
+0.170108 0.254585 -0.71573 -0.627679
+0.135423 0.27461 -0.627679 -0.71573
+0.0984203 0.289937 -0.528887 -0.791535
+0.059734 0.300303 -0.421046 -0.853797
+0.0200255 0.305531 -0.306001 -0.901451
+-0.0200255 0.305531 -0.18572 -0.93368
+-0.059734 0.300303 -0.0622619 -0.949933
+-0.0984204 0.289937 0.062262 -0.949933
+-0.135423 0.27461 0.185721 -0.93368
+-0.170108 0.254585 0.306001 -0.901451
+-0.201883 0.230203 0.421046 -0.853797
+-0.230203 0.201883 0.528887 -0.791535
+-0.254585 0.170108 0.627679 -0.71573
+-0.27461 0.135423 0.71573 -0.627678
+-0.289937 0.0984203 0.791536 -0.528887
+-0.300303 0.0597339 0.853798 -0.421046
+-0.305531 0.0200255 0.901451 -0.306001
+0.407374 0.0267007 -0.905061 0.119154
+0.400404 0.0796453 -0.912871 -4.98786e-09
+0.386583 0.131227 -0.905061 -0.119154
+0.366147 0.180564 -0.881766 -0.236268
+0.339446 0.226811 -0.843383 -0.349341
+0.306937 0.269177 -0.790569 -0.456435
+0.269177 0.306937 -0.724229 -0.555721
+0.226811 0.339446 -0.645497 -0.645497
+0.180564 0.366147 -0.555721 -0.724229
+0.131227 0.386583 -0.456435 -0.790569
+0.0796453 0.400404 -0.349341 -0.843383
+0.0267007 0.407374 -0.236268 -0.881766
+-0.0267007 0.407374 -0.119154 -0.905061
+-0.0796453 0.400404 4.0822e-09 -0.912871
+-0.131227 0.386583 0.119154 -0.905061
+-0.180564 0.366147 0.236269 -0.881766
+-0.226811 0.339446 0.349341 -0.843383
+-0.269177 0.306937 0.456436 -0.790569
+-0.306937 0.269177 0.555721 -0.724229
+-0.339446 0.226811 0.645497 -0.645497
+-0.366147 0.180564 0.724229 -0.555721
+-0.386583 0.131227 0.79057 -0.456435
+-0.400404 0.0796452 0.843383 -0.34934
+-0.407374 0.0267006 0.881766 -0.236268
+0.407374 0.0267007 -0.790569 0.456435
+0.400404 0.0796453 -0.843383 0.349341
+0.386583 0.131227 -0.881766 0.236268
+0.366147 0.180564 -0.905061 0.119154
+0.339446 0.226811 -0.912871 -1.36073e-09
+0.306937 0.269177 -0.905061 -0.119154
+0.269177 0.306937 -0.881766 -0.236268
+0.226811 0.339446 -0.843383 -0.349341
+0.180564 0.366147 -0.790569 -0.456435
+0.131227 0.386583 -0.724229 -0.555721
+0.0796453 0.400404 -0.645497 -0.645497
+0.0267007 0.407374 -0.555721 -0.724229
+-0.0267007 0.407374 -0.456435 -0.790569
+-0.0796453 0.400404 -0.349341 -0.843383
+-0.131227 0.386583 -0.236268 -0.881766
+-0.180564 0.366147 -0.119153 -0.905061
+-0.226811 0.339446 1.50086e-07 -0.912871
+-0.269177 0.306937 0.119154 -0.905061
+-0.306937 0.269177 0.236268 -0.881766
+-0.339446 0.226811 0.349341 -0.843383
+-0.366147 0.180564 0.456436 -0.790569
+-0.386583 0.131227 0.555721 -0.724229
+-0.400404 0.0796452 0.645497 -0.645497
+-0.407374 0.0267006 0.724229 -0.55572
+0.498929 0.0327016 -0.820066 0.278375
+0.490393 0.0975452 -0.849385 0.168953
+0.473465 0.16072 -0.864171 0.0566408
+0.448436 0.221144 -0.864171 -0.0566408
+0.415735 0.277785 -0.849385 -0.168953
+0.37592 0.329673 -0.820066 -0.278375
+0.329673 0.37592 -0.776715 -0.383033
+0.277785 0.415735 -0.720074 -0.481138
+0.221144 0.448436 -0.651112 -0.57101
+0.16072 0.473465 -0.57101 -0.651112
+0.0975452 0.490393 -0.481138 -0.720074
+0.0327015 0.498929 -0.383033 -0.776715
+-0.0327016 0.498929 -0.278375 -0.820066
+-0.0975452 0.490393 -0.168953 -0.849385
+-0.16072 0.473465 -0.0566407 -0.864171
+-0.221144 0.448436 0.0566409 -0.864171
+-0.277785 0.415735 0.168953 -0.849385
+-0.329673 0.37592 0.278375 -0.820066
+-0.37592 0.329673 0.383033 -0.776715
+-0.415735 0.277785 0.481138 -0.720074
+-0.448436 0.221144 0.57101 -0.651112
+-0.473465 0.16072 0.651113 -0.57101
+-0.490393 0.097545 0.720074 -0.481138
+-0.498929 0.0327015 0.776715 -0.383033
+0.305531 0.0200255 -0.306001 0.901451
+0.300303 0.059734 -0.421046 0.853797
+0.289937 0.0984203 -0.528887 0.791535
+0.27461 0.135423 -0.627679 0.71573
+0.254585 0.170108 -0.71573 0.627679
+0.230203 0.201883 -0.791535 0.528887
+0.201883 0.230203 -0.853797 0.421046
+0.170108 0.254585 -0.901451 0.306001
+0.135423 0.27461 -0.93368 0.18572
+0.0984203 0.289937 -0.949933 0.0622619
+0.059734 0.300303 -0.949933 -0.0622619
+0.0200255 0.305531 -0.93368 -0.185721
+-0.0200255 0.305531 -0.901451 -0.306001
+-0.059734 0.300303 -0.853797 -0.421046
+-0.0984204 0.289937 -0.791535 -0.528887
+-0.135423 0.27461 -0.71573 -0.627679
+-0.170108 0.254585 -0.627678 -0.71573
+-0.201883 0.230203 -0.528887 -0.791536
+-0.230203 0.201883 -0.421046 -0.853797
+-0.254585 0.170108 -0.306001 -0.901451
+-0.27461 0.135423 -0.18572 -0.93368
+-0.289937 0.0984203 -0.0622617 -0.949933
+-0.300303 0.0597339 0.0622622 -0.949933
+-0.305531 0.0200255 0.185721 -0.93368
+0.407374 0.0267007 -0.555721 0.724229
+0.400404 0.0796453 -0.645497 0.645497
+0.386583 0.131227 -0.724229 0.555721
+0.366147 0.180564 -0.790569 0.456435
+0.339446 0.226811 -0.843383 0.349341
+0.306937 0.269177 -0.881766 0.236268
+0.269177 0.306937 -0.905061 0.119154
+0.226811 0.339446 -0.912871 -3.85421e-08
+0.180564 0.366147 -0.905061 -0.119154
+0.131227 0.386583 -0.881766 -0.236268
+0.0796453 0.400404 -0.843383 -0.349341
+0.0267007 0.407374 -0.790569 -0.456436
+-0.0267007 0.407374 -0.724229 -0.555721
+-0.0796453 0.400404 -0.645497 -0.645497
+-0.131227 0.386583 -0.555721 -0.724229
+-0.180564 0.366147 -0.456435 -0.79057
+-0.226811 0.339446 -0.34934 -0.843383
+-0.269177 0.306937 -0.236268 -0.881766
+-0.306937 0.269177 -0.119153 -0.905061
+-0.339446 0.226811 7.8445e-08 -0.912871
+-0.366147 0.180564 0.119154 -0.905061
+-0.386583 0.131227 0.236269 -0.881766
+-0.400404 0.0796452 0.349341 -0.843383
+-0.407374 0.0267006 0.456436 -0.790569
+0.407374 0.0267007 -0.236268 0.881766
+0.400404 0.0796453 -0.349341 0.843383
+0.386583 0.131227 -0.456435 0.790569
+0.366147 0.180564 -0.555721 0.724229
+0.339446 0.226811 -0.645497 0.645497
+0.306937 0.269177 -0.724229 0.555721
+0.269177 0.306937 -0.790569 0.456435
+0.226811 0.339446 -0.843383 0.349341
+0.180564 0.366147 -0.881766 0.236268
+0.131227 0.386583 -0.905061 0.119154
+0.0796453 0.400404 -0.912871 3.30992e-08
+0.0267007 0.407374 -0.905061 -0.119154
+-0.0267007 0.407374 -0.881766 -0.236268
+-0.0796453 0.400404 -0.843383 -0.349341
+-0.131227 0.386583 -0.790569 -0.456436
+-0.180564 0.366147 -0.724229 -0.555721
+-0.226811 0.339446 -0.645497 -0.645497
+-0.269177 0.306937 -0.555721 -0.724229
+-0.306937 0.269177 -0.456435 -0.790569
+-0.339446 0.226811 -0.349341 -0.843383
+-0.366147 0.180564 -0.236268 -0.881766
+-0.386583 0.131227 -0.119153 -0.905061
+-0.400404 0.0796452 2.24449e-07 -0.912871
+-0.407374 0.0267006 0.119154 -0.905061
+0.498929 0.0327016 -0.383033 0.776715
+0.490393 0.0975452 -0.481138 0.720074
+0.473465 0.16072 -0.57101 0.651112
+0.448436 0.221144 -0.651112 0.57101
+0.415735 0.277785 -0.720074 0.481138
+0.37592 0.329673 -0.776715 0.383033
+0.329673 0.37592 -0.820066 0.278375
+0.277785 0.415735 -0.849385 0.168953
+0.221144 0.448436 -0.864171 0.0566408
+0.16072 0.473465 -0.864171 -0.0566408
+0.0975452 0.490393 -0.849385 -0.168953
+0.0327015 0.498929 -0.820066 -0.278375
+-0.0327016 0.498929 -0.776715 -0.383033
+-0.0975452 0.490393 -0.720074 -0.481138
+-0.16072 0.473465 -0.651112 -0.57101
+-0.221144 0.448436 -0.57101 -0.651112
+-0.277785 0.415735 -0.481138 -0.720074
+-0.329673 0.37592 -0.383033 -0.776715
+-0.37592 0.329673 -0.278375 -0.820066
+-0.415735 0.277785 -0.168953 -0.849385
+-0.448436 0.221144 -0.0566407 -0.864171
+-0.473465 0.16072 0.056641 -0.864171
+-0.490393 0.097545 0.168953 -0.849385
+-0.498929 0.0327015 0.278375 -0.820065
+0.498929 0.0327016 -0.651112 0.57101
+0.490393 0.0975452 -0.720074 0.481138
+0.473465 0.16072 -0.776715 0.383033
+0.448436 0.221144 -0.820066 0.278375
+0.415735 0.277785 -0.849385 0.168953
+0.37592 0.329673 -0.864171 0.0566408
+0.329673 0.37592 -0.864171 -0.0566408
+0.277785 0.415735 -0.849385 -0.168953
+0.221144 0.448436 -0.820066 -0.278375
+0.16072 0.473465 -0.776715 -0.383033
+0.0975452 0.490393 -0.720074 -0.481138
+0.0327015 0.498929 -0.651112 -0.57101
+-0.0327016 0.498929 -0.57101 -0.651112
+-0.0975452 0.490393 -0.481138 -0.720074
+-0.16072 0.473465 -0.383033 -0.776715
+-0.221144 0.448436 -0.278375 -0.820066
+-0.277785 0.415735 -0.168953 -0.849385
+-0.329673 0.37592 -0.0566407 -0.864171
+-0.37592 0.329673 0.0566409 -0.864171
+-0.415735 0.277785 0.168953 -0.849385
+-0.448436 0.221144 0.278375 -0.820066
+-0.473465 0.16072 0.383033 -0.776714
+-0.490393 0.097545 0.481138 -0.720074
+-0.498929 0.0327015 0.57101 -0.651112
+0.576114 0.0377605 -0.707107 0.408248
+0.566257 0.112635 -0.754344 0.31246
+0.54671 0.185583 -0.788675 0.211325
+0.51781 0.255356 -0.809511 0.106574
+0.480049 0.320759 -0.816497 -1.21708e-09
+0.434075 0.380673 -0.809511 -0.106574
+0.380673 0.434075 -0.788675 -0.211325
+0.320759 0.480049 -0.754344 -0.31246
+0.255355 0.51781 -0.707107 -0.408248
+0.185583 0.54671 -0.64777 -0.497052
+0.112635 0.566257 -0.57735 -0.57735
+0.0377605 0.576114 -0.497052 -0.64777
+-0.0377605 0.576114 -0.408248 -0.707107
+-0.112635 0.566257 -0.31246 -0.754344
+-0.185583 0.54671 -0.211325 -0.788675
+-0.255356 0.51781 -0.106574 -0.809511
+-0.320759 0.480049 1.34241e-07 -0.816497
+-0.380674 0.434075 0.106574 -0.809511
+-0.434075 0.380673 0.211325 -0.788675
+-0.480049 0.320759 0.31246 -0.754344
+-0.51781 0.255355 0.408248 -0.707107
+-0.54671 0.185583 0.497052 -0.64777
+-0.566257 0.112635 0.57735 -0.57735
+-0.576114 0.0377604 0.64777 -0.497051
+0.576114 0.0377605 -0.497052 0.64777
+0.566257 0.112635 -0.57735 0.57735
+0.54671 0.185583 -0.64777 0.497052
+0.51781 0.255356 -0.707107 0.408248
+0.480049 0.320759 -0.754344 0.31246
+0.434075 0.380673 -0.788675 0.211325
+0.380673 0.434075 -0.809511 0.106574
+0.320759 0.480049 -0.816497 -3.44731e-08
+0.255355 0.51781 -0.809511 -0.106574
+0.185583 0.54671 -0.788675 -0.211325
+0.112635 0.566257 -0.754344 -0.31246
+0.0377605 0.576114 -0.707107 -0.408248
+-0.0377605 0.576114 -0.64777 -0.497052
+-0.112635 0.566257 -0.57735 -0.57735
+-0.185583 0.54671 -0.497052 -0.64777
+-0.255356 0.51781 -0.408248 -0.707107
+-0.320759 0.480049 -0.31246 -0.754345
+-0.380674 0.434075 -0.211325 -0.788675
+-0.434075 0.380673 -0.106574 -0.809511
+-0.480049 0.320759 7.01633e-08 -0.816497
+-0.51781 0.255355 0.106574 -0.809511
+-0.54671 0.185583 0.211325 -0.788675
+-0.566257 0.112635 0.31246 -0.754344
+-0.576114 0.0377604 0.408248 -0.707107
+0.644115 0.0422175 -0.574227 0.503584
+0.633094 0.12593 -0.635045 0.424324
+0.611241 0.207488 -0.684998 0.337804
+0.578929 0.285496 -0.72323 0.245503
+0.536711 0.358619 -0.749087 0.149003
+0.485311 0.425606 -0.762127 0.0499525
+0.425606 0.485311 -0.762127 -0.0499525
+0.358619 0.536711 -0.749087 -0.149003
+0.285496 0.578929 -0.72323 -0.245503
+0.207488 0.611241 -0.684998 -0.337804
+0.12593 0.633094 -0.635045 -0.424324
+0.0422175 0.644115 -0.574227 -0.503584
+-0.0422176 0.644115 -0.503584 -0.574227
+-0.12593 0.633094 -0.424324 -0.635045
+-0.207488 0.611241 -0.337804 -0.684998
+-0.285496 0.578929 -0.245503 -0.72323
+-0.358619 0.536711 -0.149003 -0.749087
+-0.425606 0.48531 -0.0499524 -0.762127
+-0.485311 0.425606 0.0499526 -0.762127
+-0.536711 0.358619 0.149003 -0.749087
+-0.578929 0.285496 0.245503 -0.72323
+-0.611241 0.207488 0.337804 -0.684998
+-0.633094 0.12593 0.424324 -0.635045
+-0.644115 0.0422174 0.503584 -0.574227
+0.101844 0.00667518 -0.655903 -0.747914
+0.100101 0.0199113 -0.552669 -0.827128
+0.0966457 0.0328068 -0.439979 -0.892189
+0.0915367 0.0451409 -0.319761 -0.941985
+0.0848615 0.0567026 -0.194072 -0.975664
+0.0767343 0.0672942 -0.0650616 -0.992648
+0.0672942 0.0767343 0.0650616 -0.992648
+0.0567026 0.0848615 0.194072 -0.975664
+0.0451409 0.0915367 0.319761 -0.941985
+0.0328068 0.0966457 0.439979 -0.892189
+0.0199113 0.100101 0.552669 -0.827128
+0.00667517 0.101844 0.655903 -0.747914
+-0.00667518 0.101844 0.747914 -0.655903
+-0.0199113 0.100101 0.827128 -0.552669
+-0.0328068 0.0966456 0.892189 -0.439979
+-0.0451409 0.0915367 0.941985 -0.319761
+-0.0567027 0.0848615 0.975664 -0.194071
+-0.0672942 0.0767343 0.992648 -0.0650615
+-0.0767343 0.0672942 0.992648 0.0650617
+-0.0848615 0.0567026 0.975664 0.194072
+-0.0915367 0.0451409 0.941985 0.319761
+-0.0966457 0.0328068 0.892189 0.439979
+-0.100101 0.0199113 0.827128 0.552669
+-0.101844 0.00667516 0.747914 0.655903
+0.203687 0.0133504 -0.314672 -0.926993
+0.200202 0.0398226 -0.190983 -0.960135
+0.193291 0.0656136 -0.0640261 -0.976849
+0.183073 0.0902818 0.0640261 -0.976849
+0.169723 0.113405 0.190983 -0.960135
+0.153469 0.134588 0.314672 -0.926993
+0.134588 0.153469 0.432976 -0.877989
+0.113405 0.169723 0.543873 -0.813963
+0.0902818 0.183073 0.645463 -0.73601
+0.0656136 0.193291 0.73601 -0.645463
+0.0398227 0.200202 0.813963 -0.543873
+0.0133503 0.203687 0.877989 -0.432976
+-0.0133504 0.203687 0.926993 -0.314672
+-0.0398226 0.200202 0.960135 -0.190983
+-0.0656136 0.193291 0.976849 -0.064026
+-0.0902818 0.183073 0.976849 0.0640263
+-0.113405 0.169723 0.960135 0.190983
+-0.134588 0.153469 0.926992 0.314672
+-0.153469 0.134588 0.877989 0.432976
+-0.169723 0.113405 0.813963 0.543873
+-0.183073 0.0902818 0.73601 0.645463
+-0.193291 0.0656135 0.645463 0.73601
+-0.200202 0.0398226 0.543873 0.813963
+-0.203687 0.0133503 0.432976 0.877989
+0.203687 0.0133504 -0.877989 -0.432976
+0.200202 0.0398226 -0.813963 -0.543873
+0.193291 0.0656136 -0.73601 -0.645463
+0.183073 0.0902818 -0.645463 -0.73601
+0.169723 0.113405 -0.543873 -0.813963
+0.153469 0.134588 -0.432976 -0.877989
+0.134588 0.153469 -0.314672 -0.926993
+0.113405 0.169723 -0.190983 -0.960135
+0.0902818 0.183073 -0.0640261 -0.976849
+0.0656136 0.193291 0.0640261 -0.976849
+0.0398227 0.200202 0.190983 -0.960135
+0.0133503 0.203687 0.314672 -0.926993
+-0.0133504 0.203687 0.432976 -0.877989
+-0.0398226 0.200202 0.543873 -0.813963
+-0.0656136 0.193291 0.645463 -0.73601
+-0.0902818 0.183073 0.73601 -0.645463
+-0.113405 0.169723 0.813963 -0.543873
+-0.134588 0.153469 0.877989 -0.432976
+-0.153469 0.134588 0.926993 -0.314671
+-0.169723 0.113405 0.960135 -0.190983
+-0.183073 0.0902818 0.976849 -0.064026
+-0.193291 0.0656135 0.976849 0.0640263
+-0.200202 0.0398226 0.960135 0.190983
+-0.203687 0.0133503 0.926992 0.314672
+0.305531 0.0200255 -0.627679 -0.71573
+0.300303 0.059734 -0.528887 -0.791535
+0.289937 0.0984203 -0.421046 -0.853797
+0.27461 0.135423 -0.306001 -0.901451
+0.254585 0.170108 -0.18572 -0.93368
+0.230203 0.201883 -0.0622619 -0.949933
+0.201883 0.230203 0.0622619 -0.949933
+0.170108 0.254585 0.18572 -0.93368
+0.135423 0.27461 0.306001 -0.901451
+0.0984203 0.289937 0.421046 -0.853797
+0.059734 0.300303 0.528887 -0.791536
+0.0200255 0.305531 0.627679 -0.71573
+-0.0200255 0.305531 0.71573 -0.627678
+-0.059734 0.300303 0.791535 -0.528887
+-0.0984204 0.289937 0.853797 -0.421046
+-0.135423 0.27461 0.901451 -0.306001
+-0.170108 0.254585 0.93368 -0.18572
+-0.201883 0.230203 0.949933 -0.0622618
+-0.230203 0.201883 0.949933 0.062262
+-0.254585 0.170108 0.93368 0.185721
+-0.27461 0.135423 0.901451 0.306001
+-0.289937 0.0984203 0.853797 0.421047
+-0.300303 0.0597339 0.791535 0.528887
+-0.305531 0.0200255 0.71573 0.627679
+0.305531 0.0200255 -0.18572 -0.93368
+0.300303 0.059734 -0.0622619 -0.949933
+0.289937 0.0984203 0.0622619 -0.949933
+0.27461 0.135423 0.18572 -0.93368
+0.254585 0.170108 0.306001 -0.901451
+0.230203 0.201883 0.421046 -0.853797
+0.201883 0.230203 0.528887 -0.791535
+0.170108 0.254585 0.627679 -0.71573
+0.135423 0.27461 0.71573 -0.627679
+0.0984203 0.289937 0.791535 -0.528887
+0.059734 0.300303 0.853797 -0.421046
+0.0200255 0.305531 0.901451 -0.306001
+-0.0200255 0.305531 0.93368 -0.18572
+-0.059734 0.300303 0.949933 -0.0622619
+-0.0984204 0.289937 0.949933 0.062262
+-0.135423 0.27461 0.93368 0.185721
+-0.170108 0.254585 0.901451 0.306001
+-0.201883 0.230203 0.853797 0.421046
+-0.230203 0.201883 0.791535 0.528887
+-0.254585 0.170108 0.71573 0.627679
+-0.27461 0.135423 0.627678 0.71573
+-0.289937 0.0984203 0.528887 0.791536
+-0.300303 0.0597339 0.421046 0.853798
+-0.305531 0.0200255 0.306001 0.901451
+0.407374 0.0267007 -0.119154 -0.905061
+0.400404 0.0796453 4.98786e-09 -0.912871
+0.386583 0.131227 0.119154 -0.905061
+0.366147 0.180564 0.236268 -0.881766
+0.339446 0.226811 0.349341 -0.843383
+0.306937 0.269177 0.456435 -0.790569
+0.269177 0.306937 0.555721 -0.724229
+0.226811 0.339446 0.645497 -0.645497
+0.180564 0.366147 0.724229 -0.555721
+0.131227 0.386583 0.790569 -0.456435
+0.0796453 0.400404 0.843383 -0.349341
+0.0267007 0.407374 0.881766 -0.236268
+-0.0267007 0.407374 0.905061 -0.119154
+-0.0796453 0.400404 0.912871 4.0822e-09
+-0.131227 0.386583 0.905061 0.119154
+-0.180564 0.366147 0.881766 0.236269
+-0.226811 0.339446 0.843383 0.349341
+-0.269177 0.306937 0.790569 0.456436
+-0.306937 0.269177 0.724229 0.555721
+-0.339446 0.226811 0.645497 0.645497
+-0.366147 0.180564 0.555721 0.724229
+-0.386583 0.131227 0.456435 0.79057
+-0.400404 0.0796452 0.34934 0.843383
+-0.407374 0.0267006 0.236268 0.881766
+0.407374 0.0267007 -0.456435 -0.790569
+0.400404 0.0796453 -0.349341 -0.843383
+0.386583 0.131227 -0.236268 -0.881766
+0.366147 0.180564 -0.119154 -0.905061
+0.339446 0.226811 1.36073e-09 -0.912871
+0.306937 0.269177 0.119154 -0.905061
+0.269177 0.306937 0.236268 -0.881766
+0.226811 0.339446 0.349341 -0.843383
+0.180564 0.366147 0.456435 -0.790569
+0.131227 0.386583 0.555721 -0.724229
+0.0796453 0.400404 0.645497 -0.645497
+0.0267007 0.407374 0.724229 -0.555721
+-0.0267007 0.407374 0.790569 -0.456435
+-0.0796453 0.400404 0.843383 -0.349341
+-0.131227 0.386583 0.881766 -0.236268
+-0.180564 0.366147 0.905061 -0.119153
+-0.226811 0.339446 0.912871 1.50086e-07
+-0.269177 0.306937 0.905061 0.119154
+-0.306937 0.269177 0.881766 0.236268
+-0.339446 0.226811 0.843383 0.349341
+-0.366147 0.180564 0.790569 0.456436
+-0.386583 0.131227 0.724229 0.555721
+-0.400404 0.0796452 0.645497 0.645497
+-0.407374 0.0267006 0.55572 0.724229
+0.498929 0.0327016 -0.278375 -0.820066
+0.490393 0.0975452 -0.168953 -0.849385
+0.473465 0.16072 -0.0566408 -0.864171
+0.448436 0.221144 0.0566408 -0.864171
+0.415735 0.277785 0.168953 -0.849385
+0.37592 0.329673 0.278375 -0.820066
+0.329673 0.37592 0.383033 -0.776715
+0.277785 0.415735 0.481138 -0.720074
+0.221144 0.448436 0.57101 -0.651112
+0.16072 0.473465 0.651112 -0.57101
+0.0975452 0.490393 0.720074 -0.481138
+0.0327015 0.498929 0.776715 -0.383033
+-0.0327016 0.498929 0.820066 -0.278375
+-0.0975452 0.490393 0.849385 -0.168953
+-0.16072 0.473465 0.864171 -0.0566407
+-0.221144 0.448436 0.864171 0.0566409
+-0.277785 0.415735 0.849385 0.168953
+-0.329673 0.37592 0.820066 0.278375
+-0.37592 0.329673 0.776715 0.383033
+-0.415735 0.277785 0.720074 0.481138
+-0.448436 0.221144 0.651112 0.57101
+-0.473465 0.16072 0.57101 0.651113
+-0.490393 0.097545 0.481138 0.720074
+-0.498929 0.0327015 0.383033 0.776715
+0.305531 0.0200255 -0.901451 -0.306001
+0.300303 0.059734 -0.853797 -0.421046
+0.289937 0.0984203 -0.791535 -0.528887
+0.27461 0.135423 -0.71573 -0.627679
+0.254585 0.170108 -0.627679 -0.71573
+0.230203 0.201883 -0.528887 -0.791535
+0.201883 0.230203 -0.421046 -0.853797
+0.170108 0.254585 -0.306001 -0.901451
+0.135423 0.27461 -0.18572 -0.93368
+0.0984203 0.289937 -0.0622619 -0.949933
+0.059734 0.300303 0.0622619 -0.949933
+0.0200255 0.305531 0.185721 -0.93368
+-0.0200255 0.305531 0.306001 -0.901451
+-0.059734 0.300303 0.421046 -0.853797
+-0.0984204 0.289937 0.528887 -0.791535
+-0.135423 0.27461 0.627679 -0.71573
+-0.170108 0.254585 0.71573 -0.627678
+-0.201883 0.230203 0.791536 -0.528887
+-0.230203 0.201883 0.853797 -0.421046
+-0.254585 0.170108 0.901451 -0.306001
+-0.27461 0.135423 0.93368 -0.18572
+-0.289937 0.0984203 0.949933 -0.0622617
+-0.300303 0.0597339 0.949933 0.0622622
+-0.305531 0.0200255 0.93368 0.185721
+0.407374 0.0267007 -0.724229 -0.555721
+0.400404 0.0796453 -0.645497 -0.645497
+0.386583 0.131227 -0.555721 -0.724229
+0.366147 0.180564 -0.456435 -0.790569
+0.339446 0.226811 -0.349341 -0.843383
+0.306937 0.269177 -0.236268 -0.881766
+0.269177 0.306937 -0.119154 -0.905061
+0.226811 0.339446 3.85421e-08 -0.912871
+0.180564 0.366147 0.119154 -0.905061
+0.131227 0.386583 0.236268 -0.881766
+0.0796453 0.400404 0.349341 -0.843383
+0.0267007 0.407374 0.456436 -0.790569
+-0.0267007 0.407374 0.555721 -0.724229
+-0.0796453 0.400404 0.645497 -0.645497
+-0.131227 0.386583 0.724229 -0.555721
+-0.180564 0.366147 0.79057 -0.456435
+-0.226811 0.339446 0.843383 -0.34934
+-0.269177 0.306937 0.881766 -0.236268
+-0.306937 0.269177 0.905061 -0.119153
+-0.339446 0.226811 0.912871 7.8445e-08
+-0.366147 0.180564 0.905061 0.119154
+-0.386583 0.131227 0.881766 0.236269
+-0.400404 0.0796452 0.843383 0.349341
+-0.407374 0.0267006 0.790569 0.456436
+0.407374 0.0267007 -0.881766 -0.236268
+0.400404 0.0796453 -0.843383 -0.349341
+0.386583 0.131227 -0.790569 -0.456435
+0.366147 0.180564 -0.724229 -0.555721
+0.339446 0.226811 -0.645497 -0.645497
+0.306937 0.269177 -0.555721 -0.724229
+0.269177 0.306937 -0.456435 -0.790569
+0.226811 0.339446 -0.349341 -0.843383
+0.180564 0.366147 -0.236268 -0.881766
+0.131227 0.386583 -0.119154 -0.905061
+0.0796453 0.400404 -3.30992e-08 -0.912871
+0.0267007 0.407374 0.119154 -0.905061
+-0.0267007 0.407374 0.236268 -0.881766
+-0.0796453 0.400404 0.349341 -0.843383
+-0.131227 0.386583 0.456436 -0.790569
+-0.180564 0.366147 0.555721 -0.724229
+-0.226811 0.339446 0.645497 -0.645497
+-0.269177 0.306937 0.724229 -0.555721
+-0.306937 0.269177 0.790569 -0.456435
+-0.339446 0.226811 0.843383 -0.349341
+-0.366147 0.180564 0.881766 -0.236268
+-0.386583 0.131227 0.905061 -0.119153
+-0.400404 0.0796452 0.912871 2.24449e-07
+-0.407374 0.0267006 0.905061 0.119154
+0.498929 0.0327016 -0.776715 -0.383033
+0.490393 0.0975452 -0.720074 -0.481138
+0.473465 0.16072 -0.651112 -0.57101
+0.448436 0.221144 -0.57101 -0.651112
+0.415735 0.277785 -0.481138 -0.720074
+0.37592 0.329673 -0.383033 -0.776715
+0.329673 0.37592 -0.278375 -0.820066
+0.277785 0.415735 -0.168953 -0.849385
+0.221144 0.448436 -0.0566408 -0.864171
+0.16072 0.473465 0.0566408 -0.864171
+0.0975452 0.490393 0.168953 -0.849385
+0.0327015 0.498929 0.278375 -0.820066
+-0.0327016 0.498929 0.383033 -0.776715
+-0.0975452 0.490393 0.481138 -0.720074
+-0.16072 0.473465 0.57101 -0.651112
+-0.221144 0.448436 0.651112 -0.57101
+-0.277785 0.415735 0.720074 -0.481138
+-0.329673 0.37592 0.776715 -0.383033
+-0.37592 0.329673 0.820066 -0.278375
+-0.415735 0.277785 0.849385 -0.168953
+-0.448436 0.221144 0.864171 -0.0566407
+-0.473465 0.16072 0.864171 0.056641
+-0.490393 0.097545 0.849385 0.168953
+-0.498929 0.0327015 0.820065 0.278375
+0.498929 0.0327016 -0.57101 -0.651112
+0.490393 0.0975452 -0.481138 -0.720074
+0.473465 0.16072 -0.383033 -0.776715
+0.448436 0.221144 -0.278375 -0.820066
+0.415735 0.277785 -0.168953 -0.849385
+0.37592 0.329673 -0.0566408 -0.864171
+0.329673 0.37592 0.0566408 -0.864171
+0.277785 0.415735 0.168953 -0.849385
+0.221144 0.448436 0.278375 -0.820066
+0.16072 0.473465 0.383033 -0.776715
+0.0975452 0.490393 0.481138 -0.720074
+0.0327015 0.498929 0.57101 -0.651112
+-0.0327016 0.498929 0.651112 -0.57101
+-0.0975452 0.490393 0.720074 -0.481138
+-0.16072 0.473465 0.776715 -0.383033
+-0.221144 0.448436 0.820066 -0.278375
+-0.277785 0.415735 0.849385 -0.168953
+-0.329673 0.37592 0.864171 -0.0566407
+-0.37592 0.329673 0.864171 0.0566409
+-0.415735 0.277785 0.849385 0.168953
+-0.448436 0.221144 0.820066 0.278375
+-0.473465 0.16072 0.776714 0.383033
+-0.490393 0.097545 0.720074 0.481138
+-0.498929 0.0327015 0.651112 0.57101
+0.576114 0.0377605 -0.408248 -0.707107
+0.566257 0.112635 -0.31246 -0.754344
+0.54671 0.185583 -0.211325 -0.788675
+0.51781 0.255356 -0.106574 -0.809511
+0.480049 0.320759 1.21708e-09 -0.816497
+0.434075 0.380673 0.106574 -0.809511
+0.380673 0.434075 0.211325 -0.788675
+0.320759 0.480049 0.31246 -0.754344
+0.255355 0.51781 0.408248 -0.707107
+0.185583 0.54671 0.497052 -0.64777
+0.112635 0.566257 0.57735 -0.57735
+0.0377605 0.576114 0.64777 -0.497052
+-0.0377605 0.576114 0.707107 -0.408248
+-0.112635 0.566257 0.754344 -0.31246
+-0.185583 0.54671 0.788675 -0.211325
+-0.255356 0.51781 0.809511 -0.106574
+-0.320759 0.480049 0.816497 1.34241e-07
+-0.380674 0.434075 0.809511 0.106574
+-0.434075 0.380673 0.788675 0.211325
+-0.480049 0.320759 0.754344 0.31246
+-0.51781 0.255355 0.707107 0.408248
+-0.54671 0.185583 0.64777 0.497052
+-0.566257 0.112635 0.57735 0.57735
+-0.576114 0.0377604 0.497051 0.64777
+0.576114 0.0377605 -0.64777 -0.497052
+0.566257 0.112635 -0.57735 -0.57735
+0.54671 0.185583 -0.497052 -0.64777
+0.51781 0.255356 -0.408248 -0.707107
+0.480049 0.320759 -0.31246 -0.754344
+0.434075 0.380673 -0.211325 -0.788675
+0.380673 0.434075 -0.106574 -0.809511
+0.320759 0.480049 3.44731e-08 -0.816497
+0.255355 0.51781 0.106574 -0.809511
+0.185583 0.54671 0.211325 -0.788675
+0.112635 0.566257 0.31246 -0.754344
+0.0377605 0.576114 0.408248 -0.707107
+-0.0377605 0.576114 0.497052 -0.64777
+-0.112635 0.566257 0.57735 -0.57735
+-0.185583 0.54671 0.64777 -0.497052
+-0.255356 0.51781 0.707107 -0.408248
+-0.320759 0.480049 0.754345 -0.31246
+-0.380674 0.434075 0.788675 -0.211325
+-0.434075 0.380673 0.809511 -0.106574
+-0.480049 0.320759 0.816497 7.01633e-08
+-0.51781 0.255355 0.809511 0.106574
+-0.54671 0.185583 0.788675 0.211325
+-0.566257 0.112635 0.754344 0.31246
+-0.576114 0.0377604 0.707107 0.408248
+0.644115 0.0422175 -0.503584 -0.574227
+0.633094 0.12593 -0.424324 -0.635045
+0.611241 0.207488 -0.337804 -0.684998
+0.578929 0.285496 -0.245503 -0.72323
+0.536711 0.358619 -0.149003 -0.749087
+0.485311 0.425606 -0.0499525 -0.762127
+0.425606 0.485311 0.0499525 -0.762127
+0.358619 0.536711 0.149003 -0.749087
+0.285496 0.578929 0.245503 -0.72323
+0.207488 0.611241 0.337804 -0.684998
+0.12593 0.633094 0.424324 -0.635045
+0.0422175 0.644115 0.503584 -0.574227
+-0.0422176 0.644115 0.574227 -0.503584
+-0.12593 0.633094 0.635045 -0.424324
+-0.207488 0.611241 0.684998 -0.337804
+-0.285496 0.578929 0.72323 -0.245503
+-0.358619 0.536711 0.749087 -0.149003
+-0.425606 0.48531 0.762127 -0.0499524
+-0.485311 0.425606 0.762127 0.0499526
+-0.536711 0.358619 0.749087 0.149003
+-0.578929 0.285496 0.72323 0.245503
+-0.611241 0.207488 0.684998 0.337804
+-0.633094 0.12593 0.635045 0.424324
+-0.644115 0.0422174 0.574227 0.503584
+0.101844 0.00667518 0.747914 -0.655903
+0.100101 0.0199113 0.827128 -0.552669
+0.0966457 0.0328068 0.892189 -0.439979
+0.0915367 0.0451409 0.941985 -0.319761
+0.0848615 0.0567026 0.975664 -0.194072
+0.0767343 0.0672942 0.992648 -0.0650616
+0.0672942 0.0767343 0.992648 0.0650616
+0.0567026 0.0848615 0.975664 0.194072
+0.0451409 0.0915367 0.941985 0.319761
+0.0328068 0.0966457 0.892189 0.439979
+0.0199113 0.100101 0.827128 0.552669
+0.00667517 0.101844 0.747914 0.655903
+-0.00667518 0.101844 0.655903 0.747914
+-0.0199113 0.100101 0.552669 0.827128
+-0.0328068 0.0966456 0.439979 0.892189
+-0.0451409 0.0915367 0.319761 0.941985
+-0.0567027 0.0848615 0.194071 0.975664
+-0.0672942 0.0767343 0.0650615 0.992648
+-0.0767343 0.0672942 -0.0650617 0.992648
+-0.0848615 0.0567026 -0.194072 0.975664
+-0.0915367 0.0451409 -0.319761 0.941985
+-0.0966457 0.0328068 -0.439979 0.892189
+-0.100101 0.0199113 -0.552669 0.827128
+-0.101844 0.00667516 -0.655903 0.747914
+0.203687 0.0133504 0.926993 -0.314672
+0.200202 0.0398226 0.960135 -0.190983
+0.193291 0.0656136 0.976849 -0.0640261
+0.183073 0.0902818 0.976849 0.0640261
+0.169723 0.113405 0.960135 0.190983
+0.153469 0.134588 0.926993 0.314672
+0.134588 0.153469 0.877989 0.432976
+0.113405 0.169723 0.813963 0.543873
+0.0902818 0.183073 0.73601 0.645463
+0.0656136 0.193291 0.645463 0.73601
+0.0398227 0.200202 0.543873 0.813963
+0.0133503 0.203687 0.432976 0.877989
+-0.0133504 0.203687 0.314672 0.926993
+-0.0398226 0.200202 0.190983 0.960135
+-0.0656136 0.193291 0.064026 0.976849
+-0.0902818 0.183073 -0.0640263 0.976849
+-0.113405 0.169723 -0.190983 0.960135
+-0.134588 0.153469 -0.314672 0.926992
+-0.153469 0.134588 -0.432976 0.877989
+-0.169723 0.113405 -0.543873 0.813963
+-0.183073 0.0902818 -0.645463 0.73601
+-0.193291 0.0656135 -0.73601 0.645463
+-0.200202 0.0398226 -0.813963 0.543873
+-0.203687 0.0133503 -0.877989 0.432976
+0.203687 0.0133504 0.432976 -0.877989
+0.200202 0.0398226 0.543873 -0.813963
+0.193291 0.0656136 0.645463 -0.73601
+0.183073 0.0902818 0.73601 -0.645463
+0.169723 0.113405 0.813963 -0.543873
+0.153469 0.134588 0.877989 -0.432976
+0.134588 0.153469 0.926993 -0.314672
+0.113405 0.169723 0.960135 -0.190983
+0.0902818 0.183073 0.976849 -0.0640261
+0.0656136 0.193291 0.976849 0.0640261
+0.0398227 0.200202 0.960135 0.190983
+0.0133503 0.203687 0.926993 0.314672
+-0.0133504 0.203687 0.877989 0.432976
+-0.0398226 0.200202 0.813963 0.543873
+-0.0656136 0.193291 0.73601 0.645463
+-0.0902818 0.183073 0.645463 0.73601
+-0.113405 0.169723 0.543873 0.813963
+-0.134588 0.153469 0.432976 0.877989
+-0.153469 0.134588 0.314671 0.926993
+-0.169723 0.113405 0.190983 0.960135
+-0.183073 0.0902818 0.064026 0.976849
+-0.193291 0.0656135 -0.0640263 0.976849
+-0.200202 0.0398226 -0.190983 0.960135
+-0.203687 0.0133503 -0.314672 0.926992
+0.305531 0.0200255 0.71573 -0.627679
+0.300303 0.059734 0.791535 -0.528887
+0.289937 0.0984203 0.853797 -0.421046
+0.27461 0.135423 0.901451 -0.306001
+0.254585 0.170108 0.93368 -0.18572
+0.230203 0.201883 0.949933 -0.0622619
+0.201883 0.230203 0.949933 0.0622619
+0.170108 0.254585 0.93368 0.18572
+0.135423 0.27461 0.901451 0.306001
+0.0984203 0.289937 0.853797 0.421046
+0.059734 0.300303 0.791536 0.528887
+0.0200255 0.305531 0.71573 0.627679
+-0.0200255 0.305531 0.627678 0.71573
+-0.059734 0.300303 0.528887 0.791535
+-0.0984204 0.289937 0.421046 0.853797
+-0.135423 0.27461 0.306001 0.901451
+-0.170108 0.254585 0.18572 0.93368
+-0.201883 0.230203 0.0622618 0.949933
+-0.230203 0.201883 -0.062262 0.949933
+-0.254585 0.170108 -0.185721 0.93368
+-0.27461 0.135423 -0.306001 0.901451
+-0.289937 0.0984203 -0.421047 0.853797
+-0.300303 0.0597339 -0.528887 0.791535
+-0.305531 0.0200255 -0.627679 0.71573
+0.305531 0.0200255 0.93368 -0.18572
+0.300303 0.059734 0.949933 -0.0622619
+0.289937 0.0984203 0.949933 0.0622619
+0.27461 0.135423 0.93368 0.18572
+0.254585 0.170108 0.901451 0.306001
+0.230203 0.201883 0.853797 0.421046
+0.201883 0.230203 0.791535 0.528887
+0.170108 0.254585 0.71573 0.627679
+0.135423 0.27461 0.627679 0.71573
+0.0984203 0.289937 0.528887 0.791535
+0.059734 0.300303 0.421046 0.853797
+0.0200255 0.305531 0.306001 0.901451
+-0.0200255 0.305531 0.18572 0.93368
+-0.059734 0.300303 0.0622619 0.949933
+-0.0984204 0.289937 -0.062262 0.949933
+-0.135423 0.27461 -0.185721 0.93368
+-0.170108 0.254585 -0.306001 0.901451
+-0.201883 0.230203 -0.421046 0.853797
+-0.230203 0.201883 -0.528887 0.791535
+-0.254585 0.170108 -0.627679 0.71573
+-0.27461 0.135423 -0.71573 0.627678
+-0.289937 0.0984203 -0.791536 0.528887
+-0.300303 0.0597339 -0.853798 0.421046
+-0.305531 0.0200255 -0.901451 0.306001
+0.407374 0.0267007 0.905061 -0.119154
+0.400404 0.0796453 0.912871 4.98786e-09
+0.386583 0.131227 0.905061 0.119154
+0.366147 0.180564 0.881766 0.236268
+0.339446 0.226811 0.843383 0.349341
+0.306937 0.269177 0.790569 0.456435
+0.269177 0.306937 0.724229 0.555721
+0.226811 0.339446 0.645497 0.645497
+0.180564 0.366147 0.555721 0.724229
+0.131227 0.386583 0.456435 0.790569
+0.0796453 0.400404 0.349341 0.843383
+0.0267007 0.407374 0.236268 0.881766
+-0.0267007 0.407374 0.119154 0.905061
+-0.0796453 0.400404 -4.0822e-09 0.912871
+-0.131227 0.386583 -0.119154 0.905061
+-0.180564 0.366147 -0.236269 0.881766
+-0.226811 0.339446 -0.349341 0.843383
+-0.269177 0.306937 -0.456436 0.790569
+-0.306937 0.269177 -0.555721 0.724229
+-0.339446 0.226811 -0.645497 0.645497
+-0.366147 0.180564 -0.724229 0.555721
+-0.386583 0.131227 -0.79057 0.456435
+-0.400404 0.0796452 -0.843383 0.34934
+-0.407374 0.0267006 -0.881766 0.236268
+0.407374 0.0267007 0.790569 -0.456435
+0.400404 0.0796453 0.843383 -0.349341
+0.386583 0.131227 0.881766 -0.236268
+0.366147 0.180564 0.905061 -0.119154
+0.339446 0.226811 0.912871 1.36073e-09
+0.306937 0.269177 0.905061 0.119154
+0.269177 0.306937 0.881766 0.236268
+0.226811 0.339446 0.843383 0.349341
+0.180564 0.366147 0.790569 0.456435
+0.131227 0.386583 0.724229 0.555721
+0.0796453 0.400404 0.645497 0.645497
+0.0267007 0.407374 0.555721 0.724229
+-0.0267007 0.407374 0.456435 0.790569
+-0.0796453 0.400404 0.349341 0.843383
+-0.131227 0.386583 0.236268 0.881766
+-0.180564 0.366147 0.119153 0.905061
+-0.226811 0.339446 -1.50086e-07 0.912871
+-0.269177 0.306937 -0.119154 0.905061
+-0.306937 0.269177 -0.236268 0.881766
+-0.339446 0.226811 -0.349341 0.843383
+-0.366147 0.180564 -0.456436 0.790569
+-0.386583 0.131227 -0.555721 0.724229
+-0.400404 0.0796452 -0.645497 0.645497
+-0.407374 0.0267006 -0.724229 0.55572
+0.498929 0.0327016 0.820066 -0.278375
+0.490393 0.0975452 0.849385 -0.168953
+0.473465 0.16072 0.864171 -0.0566408
+0.448436 0.221144 0.864171 0.0566408
+0.415735 0.277785 0.849385 0.168953
+0.37592 0.329673 0.820066 0.278375
+0.329673 0.37592 0.776715 0.383033
+0.277785 0.415735 0.720074 0.481138
+0.221144 0.448436 0.651112 0.57101
+0.16072 0.473465 0.57101 0.651112
+0.0975452 0.490393 0.481138 0.720074
+0.0327015 0.498929 0.383033 0.776715
+-0.0327016 0.498929 0.278375 0.820066
+-0.0975452 0.490393 0.168953 0.849385
+-0.16072 0.473465 0.0566407 0.864171
+-0.221144 0.448436 -0.0566409 0.864171
+-0.277785 0.415735 -0.168953 0.849385
+-0.329673 0.37592 -0.278375 0.820066
+-0.37592 0.329673 -0.383033 0.776715
+-0.415735 0.277785 -0.481138 0.720074
+-0.448436 0.221144 -0.57101 0.651112
+-0.473465 0.16072 -0.651113 0.57101
+-0.490393 0.097545 -0.720074 0.481138
+-0.498929 0.0327015 -0.776715 0.383033
+0.305531 0.0200255 0.306001 -0.901451
+0.300303 0.059734 0.421046 -0.853797
+0.289937 0.0984203 0.528887 -0.791535
+0.27461 0.135423 0.627679 -0.71573
+0.254585 0.170108 0.71573 -0.627679
+0.230203 0.201883 0.791535 -0.528887
+0.201883 0.230203 0.853797 -0.421046
+0.170108 0.254585 0.901451 -0.306001
+0.135423 0.27461 0.93368 -0.18572
+0.0984203 0.289937 0.949933 -0.0622619
+0.059734 0.300303 0.949933 0.0622619
+0.0200255 0.305531 0.93368 0.185721
+-0.0200255 0.305531 0.901451 0.306001
+-0.059734 0.300303 0.853797 0.421046
+-0.0984204 0.289937 0.791535 0.528887
+-0.135423 0.27461 0.71573 0.627679
+-0.170108 0.254585 0.627678 0.71573
+-0.201883 0.230203 0.528887 0.791536
+-0.230203 0.201883 0.421046 0.853797
+-0.254585 0.170108 0.306001 0.901451
+-0.27461 0.135423 0.18572 0.93368
+-0.289937 0.0984203 0.0622617 0.949933
+-0.300303 0.0597339 -0.0622622 0.949933
+-0.305531 0.0200255 -0.185721 0.93368
+0.407374 0.0267007 0.555721 -0.724229
+0.400404 0.0796453 0.645497 -0.645497
+0.386583 0.131227 0.724229 -0.555721
+0.366147 0.180564 0.790569 -0.456435
+0.339446 0.226811 0.843383 -0.349341
+0.306937 0.269177 0.881766 -0.236268
+0.269177 0.306937 0.905061 -0.119154
+0.226811 0.339446 0.912871 3.85421e-08
+0.180564 0.366147 0.905061 0.119154
+0.131227 0.386583 0.881766 0.236268
+0.0796453 0.400404 0.843383 0.349341
+0.0267007 0.407374 0.790569 0.456436
+-0.0267007 0.407374 0.724229 0.555721
+-0.0796453 0.400404 0.645497 0.645497
+-0.131227 0.386583 0.555721 0.724229
+-0.180564 0.366147 0.456435 0.79057
+-0.226811 0.339446 0.34934 0.843383
+-0.269177 0.306937 0.236268 0.881766
+-0.306937 0.269177 0.119153 0.905061
+-0.339446 0.226811 -7.8445e-08 0.912871
+-0.366147 0.180564 -0.119154 0.905061
+-0.386583 0.131227 -0.236269 0.881766
+-0.400404 0.0796452 -0.349341 0.843383
+-0.407374 0.0267006 -0.456436 0.790569
+0.407374 0.0267007 0.236268 -0.881766
+0.400404 0.0796453 0.349341 -0.843383
+0.386583 0.131227 0.456435 -0.790569
+0.366147 0.180564 0.555721 -0.724229
+0.339446 0.226811 0.645497 -0.645497
+0.306937 0.269177 0.724229 -0.555721
+0.269177 0.306937 0.790569 -0.456435
+0.226811 0.339446 0.843383 -0.349341
+0.180564 0.366147 0.881766 -0.236268
+0.131227 0.386583 0.905061 -0.119154
+0.0796453 0.400404 0.912871 -3.30992e-08
+0.0267007 0.407374 0.905061 0.119154
+-0.0267007 0.407374 0.881766 0.236268
+-0.0796453 0.400404 0.843383 0.349341
+-0.131227 0.386583 0.790569 0.456436
+-0.180564 0.366147 0.724229 0.555721
+-0.226811 0.339446 0.645497 0.645497
+-0.269177 0.306937 0.555721 0.724229
+-0.306937 0.269177 0.456435 0.790569
+-0.339446 0.226811 0.349341 0.843383
+-0.366147 0.180564 0.236268 0.881766
+-0.386583 0.131227 0.119153 0.905061
+-0.400404 0.0796452 -2.24449e-07 0.912871
+-0.407374 0.0267006 -0.119154 0.905061
+0.498929 0.0327016 0.383033 -0.776715
+0.490393 0.0975452 0.481138 -0.720074
+0.473465 0.16072 0.57101 -0.651112
+0.448436 0.221144 0.651112 -0.57101
+0.415735 0.277785 0.720074 -0.481138
+0.37592 0.329673 0.776715 -0.383033
+0.329673 0.37592 0.820066 -0.278375
+0.277785 0.415735 0.849385 -0.168953
+0.221144 0.448436 0.864171 -0.0566408
+0.16072 0.473465 0.864171 0.0566408
+0.0975452 0.490393 0.849385 0.168953
+0.0327015 0.498929 0.820066 0.278375
+-0.0327016 0.498929 0.776715 0.383033
+-0.0975452 0.490393 0.720074 0.481138
+-0.16072 0.473465 0.651112 0.57101
+-0.221144 0.448436 0.57101 0.651112
+-0.277785 0.415735 0.481138 0.720074
+-0.329673 0.37592 0.383033 0.776715
+-0.37592 0.329673 0.278375 0.820066
+-0.415735 0.277785 0.168953 0.849385
+-0.448436 0.221144 0.0566407 0.864171
+-0.473465 0.16072 -0.056641 0.864171
+-0.490393 0.097545 -0.168953 0.849385
+-0.498929 0.0327015 -0.278375 0.820065
+0.498929 0.0327016 0.651112 -0.57101
+0.490393 0.0975452 0.720074 -0.481138
+0.473465 0.16072 0.776715 -0.383033
+0.448436 0.221144 0.820066 -0.278375
+0.415735 0.277785 0.849385 -0.168953
+0.37592 0.329673 0.864171 -0.0566408
+0.329673 0.37592 0.864171 0.0566408
+0.277785 0.415735 0.849385 0.168953
+0.221144 0.448436 0.820066 0.278375
+0.16072 0.473465 0.776715 0.383033
+0.0975452 0.490393 0.720074 0.481138
+0.0327015 0.498929 0.651112 0.57101
+-0.0327016 0.498929 0.57101 0.651112
+-0.0975452 0.490393 0.481138 0.720074
+-0.16072 0.473465 0.383033 0.776715
+-0.221144 0.448436 0.278375 0.820066
+-0.277785 0.415735 0.168953 0.849385
+-0.329673 0.37592 0.0566407 0.864171
+-0.37592 0.329673 -0.0566409 0.864171
+-0.415735 0.277785 -0.168953 0.849385
+-0.448436 0.221144 -0.278375 0.820066
+-0.473465 0.16072 -0.383033 0.776714
+-0.490393 0.097545 -0.481138 0.720074
+-0.498929 0.0327015 -0.57101 0.651112
+0.576114 0.0377605 0.707107 -0.408248
+0.566257 0.112635 0.754344 -0.31246
+0.54671 0.185583 0.788675 -0.211325
+0.51781 0.255356 0.809511 -0.106574
+0.480049 0.320759 0.816497 1.21708e-09
+0.434075 0.380673 0.809511 0.106574
+0.380673 0.434075 0.788675 0.211325
+0.320759 0.480049 0.754344 0.31246
+0.255355 0.51781 0.707107 0.408248
+0.185583 0.54671 0.64777 0.497052
+0.112635 0.566257 0.57735 0.57735
+0.0377605 0.576114 0.497052 0.64777
+-0.0377605 0.576114 0.408248 0.707107
+-0.112635 0.566257 0.31246 0.754344
+-0.185583 0.54671 0.211325 0.788675
+-0.255356 0.51781 0.106574 0.809511
+-0.320759 0.480049 -1.34241e-07 0.816497
+-0.380674 0.434075 -0.106574 0.809511
+-0.434075 0.380673 -0.211325 0.788675
+-0.480049 0.320759 -0.31246 0.754344
+-0.51781 0.255355 -0.408248 0.707107
+-0.54671 0.185583 -0.497052 0.64777
+-0.566257 0.112635 -0.57735 0.57735
+-0.576114 0.0377604 -0.64777 0.497051
+0.576114 0.0377605 0.497052 -0.64777
+0.566257 0.112635 0.57735 -0.57735
+0.54671 0.185583 0.64777 -0.497052
+0.51781 0.255356 0.707107 -0.408248
+0.480049 0.320759 0.754344 -0.31246
+0.434075 0.380673 0.788675 -0.211325
+0.380673 0.434075 0.809511 -0.106574
+0.320759 0.480049 0.816497 3.44731e-08
+0.255355 0.51781 0.809511 0.106574
+0.185583 0.54671 0.788675 0.211325
+0.112635 0.566257 0.754344 0.31246
+0.0377605 0.576114 0.707107 0.408248
+-0.0377605 0.576114 0.64777 0.497052
+-0.112635 0.566257 0.57735 0.57735
+-0.185583 0.54671 0.497052 0.64777
+-0.255356 0.51781 0.408248 0.707107
+-0.320759 0.480049 0.31246 0.754345
+-0.380674 0.434075 0.211325 0.788675
+-0.434075 0.380673 0.106574 0.809511
+-0.480049 0.320759 -7.01633e-08 0.816497
+-0.51781 0.255355 -0.106574 0.809511
+-0.54671 0.185583 -0.211325 0.788675
+-0.566257 0.112635 -0.31246 0.754344
+-0.576114 0.0377604 -0.408248 0.707107
+0.644115 0.0422175 0.574227 -0.503584
+0.633094 0.12593 0.635045 -0.424324
+0.611241 0.207488 0.684998 -0.337804
+0.578929 0.285496 0.72323 -0.245503
+0.536711 0.358619 0.749087 -0.149003
+0.485311 0.425606 0.762127 -0.0499525
+0.425606 0.485311 0.762127 0.0499525
+0.358619 0.536711 0.749087 0.149003
+0.285496 0.578929 0.72323 0.245503
+0.207488 0.611241 0.684998 0.337804
+0.12593 0.633094 0.635045 0.424324
+0.0422175 0.644115 0.574227 0.503584
+-0.0422176 0.644115 0.503584 0.574227
+-0.12593 0.633094 0.424324 0.635045
+-0.207488 0.611241 0.337804 0.684998
+-0.285496 0.578929 0.245503 0.72323
+-0.358619 0.536711 0.149003 0.749087
+-0.425606 0.48531 0.0499524 0.762127
+-0.485311 0.425606 -0.0499526 0.762127
+-0.536711 0.358619 -0.149003 0.749087
+-0.578929 0.285496 -0.245503 0.72323
+-0.611241 0.207488 -0.337804 0.684998
+-0.633094 0.12593 -0.424324 0.635045
+-0.644115 0.0422174 -0.503584 0.574227
diff --git a/dimos/models/manipulation/contact_graspnet_pytorch/uniform_quaternions/data3_36864.qua b/dimos/models/manipulation/contact_graspnet_pytorch/uniform_quaternions/data3_36864.qua
new file mode 100644
index 0000000000..c745061f86
--- /dev/null
+++ b/dimos/models/manipulation/contact_graspnet_pytorch/uniform_quaternions/data3_36864.qua
@@ -0,0 +1,36864 @@
+0.735586 0.0240806 0.462794 0.49412
+0.732436 0.0721387 0.429486 0.523331
+0.72615 0.119888 0.394339 0.5503
+0.716754 0.167124 0.357504 0.574913
+0.704289 0.213644 0.319137 0.597064
+0.688808 0.259249 0.279404 0.616658
+0.670378 0.303744 0.238474 0.633611
+0.649076 0.346939 0.196524 0.647852
+0.624996 0.388647 0.153731 0.659318
+0.598239 0.428692 0.110281 0.667961
+0.56892 0.466901 0.0663579 0.673743
+0.537165 0.50311 0.0221509 0.676641
+0.50311 0.537165 -0.0221509 0.676641
+0.466901 0.56892 -0.0663579 0.673743
+0.428692 0.598239 -0.110281 0.667961
+0.388647 0.624996 -0.153731 0.659318
+0.346939 0.649077 -0.196524 0.647852
+0.303744 0.670378 -0.238474 0.633611
+0.259249 0.688808 -0.279404 0.616658
+0.213644 0.704289 -0.319137 0.597064
+0.167124 0.716754 -0.357504 0.574913
+0.119888 0.72615 -0.394339 0.5503
+0.0721386 0.732436 -0.429486 0.52333
+0.0240805 0.735586 -0.462794 0.49412
+-0.0240807 0.735586 -0.49412 0.462794
+-0.0721387 0.732436 -0.523331 0.429486
+-0.119888 0.72615 -0.5503 0.394339
+-0.167124 0.716754 -0.574913 0.357504
+-0.213644 0.704289 -0.597064 0.319137
+-0.259249 0.688808 -0.616658 0.279404
+-0.303744 0.670378 -0.633611 0.238474
+-0.346939 0.649077 -0.647852 0.196524
+-0.388647 0.624996 -0.659318 0.153731
+-0.428692 0.598239 -0.667961 0.110281
+-0.466901 0.56892 -0.673743 0.0663579
+-0.50311 0.537165 -0.676641 0.0221509
+-0.537165 0.50311 -0.676641 -0.0221509
+-0.56892 0.466901 -0.673743 -0.0663579
+-0.598239 0.428692 -0.667961 -0.110281
+-0.624996 0.388647 -0.659318 -0.153731
+-0.649076 0.346939 -0.647852 -0.196524
+-0.670378 0.303744 -0.633611 -0.238474
+-0.688808 0.259249 -0.616658 -0.279404
+-0.704289 0.213644 -0.597064 -0.319137
+-0.716754 0.167124 -0.574913 -0.357504
+-0.72615 0.119888 -0.5503 -0.394339
+-0.732436 0.0721386 -0.523331 -0.429486
+-0.735586 0.0240807 -0.49412 -0.462794
+0.763354 0.0249896 0.392954 0.512107
+0.760085 0.0748618 0.358619 0.536711
+0.753561 0.124413 0.322749 0.559017
+0.743811 0.173432 0.285496 0.578929
+0.730875 0.221709 0.247021 0.596362
+0.71481 0.269035 0.207488 0.611241
+0.695684 0.31521 0.167067 0.623502
+0.673578 0.360035 0.12593 0.633094
+0.648589 0.403318 0.0842543 0.639975
+0.620822 0.444875 0.0422175 0.644115
+0.590396 0.484526 1.17023e-08 0.645497
+0.557443 0.522102 -0.0422176 0.644115
+0.522102 0.557443 -0.0842543 0.639975
+0.484526 0.590396 -0.12593 0.633094
+0.444875 0.620822 -0.167067 0.623502
+0.403318 0.648589 -0.207488 0.611241
+0.360035 0.673579 -0.247021 0.596362
+0.31521 0.695684 -0.285496 0.578929
+0.269035 0.71481 -0.322749 0.559017
+0.221709 0.730875 -0.358619 0.536711
+0.173432 0.743811 -0.392954 0.512107
+0.124413 0.753561 -0.425606 0.48531
+0.0748617 0.760085 -0.456436 0.456435
+0.0249895 0.763354 -0.485311 0.425606
+-0.0249897 0.763354 -0.512107 0.392954
+-0.0748619 0.760085 -0.536711 0.358619
+-0.124414 0.753561 -0.559017 0.322749
+-0.173432 0.743811 -0.578929 0.285496
+-0.221709 0.730875 -0.596362 0.247021
+-0.269036 0.71481 -0.611241 0.207488
+-0.31521 0.695684 -0.623502 0.167067
+-0.360035 0.673579 -0.633094 0.12593
+-0.403318 0.648589 -0.639975 0.0842543
+-0.444875 0.620822 -0.644115 0.0422175
+-0.484526 0.590397 -0.645497 2.19614e-08
+-0.522102 0.557443 -0.644115 -0.0422176
+-0.557443 0.522102 -0.639975 -0.0842543
+-0.590397 0.484526 -0.633094 -0.12593
+-0.620822 0.444875 -0.623502 -0.167067
+-0.648589 0.403318 -0.611241 -0.207488
+-0.673578 0.360035 -0.596362 -0.247021
+-0.695684 0.31521 -0.578929 -0.285496
+-0.71481 0.269035 -0.559017 -0.322749
+-0.730875 0.221709 -0.536711 -0.358619
+-0.743811 0.173432 -0.512107 -0.392954
+-0.753561 0.124414 -0.485311 -0.425606
+-0.760085 0.0748618 -0.456435 -0.456435
+-0.763354 0.0249897 -0.425606 -0.48531
+0.763354 0.0249896 0.485311 0.425606
+0.760085 0.0748618 0.456435 0.456435
+0.753561 0.124413 0.425606 0.485311
+0.743811 0.173432 0.392954 0.512107
+0.730875 0.221709 0.358619 0.536711
+0.71481 0.269035 0.322749 0.559017
+0.695684 0.31521 0.285496 0.578929
+0.673578 0.360035 0.247021 0.596362
+0.648589 0.403318 0.207488 0.611241
+0.620822 0.444875 0.167067 0.623502
+0.590396 0.484526 0.12593 0.633094
+0.557443 0.522102 0.0842543 0.639975
+0.522102 0.557443 0.0422175 0.644115
+0.484526 0.590396 -1.44328e-09 0.645497
+0.444875 0.620822 -0.0422176 0.644115
+0.403318 0.648589 -0.0842544 0.639975
+0.360035 0.673579 -0.12593 0.633094
+0.31521 0.695684 -0.167067 0.623502
+0.269035 0.71481 -0.207488 0.611241
+0.221709 0.730875 -0.247021 0.596362
+0.173432 0.743811 -0.285496 0.578929
+0.124413 0.753561 -0.322749 0.559017
+0.0748617 0.760085 -0.358619 0.536711
+0.0249895 0.763354 -0.392954 0.512107
+-0.0249897 0.763354 -0.425606 0.48531
+-0.0748619 0.760085 -0.456436 0.456435
+-0.124414 0.753561 -0.485311 0.425606
+-0.173432 0.743811 -0.512107 0.392954
+-0.221709 0.730875 -0.536711 0.358619
+-0.269036 0.71481 -0.559017 0.322749
+-0.31521 0.695684 -0.578929 0.285496
+-0.360035 0.673579 -0.596362 0.247021
+-0.403318 0.648589 -0.611241 0.207488
+-0.444875 0.620822 -0.623502 0.167067
+-0.484526 0.590397 -0.633094 0.12593
+-0.522102 0.557443 -0.639975 0.0842542
+-0.557443 0.522102 -0.644115 0.0422176
+-0.590397 0.484526 -0.645497 -2.96589e-08
+-0.620822 0.444875 -0.644115 -0.0422175
+-0.648589 0.403318 -0.639975 -0.0842543
+-0.673578 0.360035 -0.633094 -0.12593
+-0.695684 0.31521 -0.623502 -0.167067
+-0.71481 0.269035 -0.611241 -0.207488
+-0.730875 0.221709 -0.596362 -0.247021
+-0.743811 0.173432 -0.578929 -0.285496
+-0.753561 0.124414 -0.559017 -0.322749
+-0.760085 0.0748618 -0.536711 -0.358619
+-0.763354 0.0249897 -0.512107 -0.392954
+0.790146 0.0258667 0.418613 0.446949
+0.786763 0.0774894 0.388485 0.47337
+0.78001 0.12878 0.356693 0.497765
+0.769917 0.17952 0.323374 0.520028
+0.756528 0.22949 0.28867 0.540064
+0.739899 0.278478 0.25273 0.557788
+0.720101 0.326274 0.215708 0.573123
+0.69722 0.372672 0.177762 0.586004
+0.671353 0.417474 0.139055 0.596375
+0.642612 0.460489 0.0997527 0.604193
+0.611118 0.501532 0.060023 0.609424
+0.577008 0.540427 0.0200362 0.612045
+0.540427 0.577008 -0.0200363 0.612045
+0.501532 0.611118 -0.060023 0.609424
+0.460489 0.642612 -0.0997527 0.604193
+0.417474 0.671353 -0.139055 0.596375
+0.372672 0.69722 -0.177762 0.586004
+0.326274 0.720101 -0.215708 0.573123
+0.278478 0.739899 -0.25273 0.557788
+0.22949 0.756528 -0.28867 0.540064
+0.17952 0.769917 -0.323374 0.520028
+0.12878 0.78001 -0.356693 0.497765
+0.0774893 0.786763 -0.388485 0.47337
+0.0258666 0.790146 -0.418613 0.446949
+-0.0258668 0.790146 -0.446949 0.418613
+-0.0774894 0.786763 -0.47337 0.388485
+-0.12878 0.78001 -0.497765 0.356693
+-0.17952 0.769917 -0.520028 0.323374
+-0.22949 0.756528 -0.540064 0.28867
+-0.278478 0.739899 -0.557788 0.25273
+-0.326274 0.720101 -0.573123 0.215708
+-0.372672 0.69722 -0.586004 0.177762
+-0.417474 0.671353 -0.596375 0.139055
+-0.460489 0.642612 -0.604193 0.0997526
+-0.501532 0.611118 -0.609424 0.060023
+-0.540427 0.577008 -0.612045 0.0200362
+-0.577008 0.540427 -0.612045 -0.0200362
+-0.611118 0.501532 -0.609424 -0.060023
+-0.642612 0.460489 -0.604193 -0.0997526
+-0.671353 0.417474 -0.596375 -0.139055
+-0.69722 0.372672 -0.586004 -0.177762
+-0.720101 0.326274 -0.573123 -0.215708
+-0.739899 0.278478 -0.557788 -0.25273
+-0.756528 0.22949 -0.540064 -0.28867
+-0.769917 0.179519 -0.520028 -0.323374
+-0.78001 0.12878 -0.497765 -0.356693
+-0.786763 0.0774893 -0.47337 -0.388485
+-0.790146 0.0258668 -0.446949 -0.418613
+0.790146 0.0258667 0.323374 0.520028
+0.786763 0.0774894 0.28867 0.540064
+0.78001 0.12878 0.25273 0.557788
+0.769917 0.17952 0.215708 0.573123
+0.756528 0.22949 0.177762 0.586004
+0.739899 0.278478 0.139055 0.596375
+0.720101 0.326274 0.0997527 0.604193
+0.69722 0.372672 0.060023 0.609424
+0.671353 0.417474 0.0200363 0.612045
+0.642612 0.460489 -0.0200363 0.612045
+0.611118 0.501532 -0.060023 0.609424
+0.577008 0.540427 -0.0997527 0.604193
+0.540427 0.577008 -0.139055 0.596375
+0.501532 0.611118 -0.177762 0.586004
+0.460489 0.642612 -0.215708 0.573123
+0.417474 0.671353 -0.25273 0.557788
+0.372672 0.69722 -0.28867 0.540064
+0.326274 0.720101 -0.323374 0.520028
+0.278478 0.739899 -0.356693 0.497765
+0.22949 0.756528 -0.388485 0.47337
+0.17952 0.769917 -0.418613 0.446949
+0.12878 0.78001 -0.446949 0.418613
+0.0774893 0.786763 -0.47337 0.388485
+0.0258666 0.790146 -0.497765 0.356693
+-0.0258668 0.790146 -0.520028 0.323374
+-0.0774894 0.786763 -0.540064 0.28867
+-0.12878 0.78001 -0.557788 0.25273
+-0.17952 0.769917 -0.573123 0.215708
+-0.22949 0.756528 -0.586004 0.177762
+-0.278478 0.739899 -0.596375 0.139055
+-0.326274 0.720101 -0.604193 0.0997526
+-0.372672 0.69722 -0.609424 0.0600231
+-0.417474 0.671353 -0.612045 0.0200363
+-0.460489 0.642612 -0.612045 -0.0200363
+-0.501532 0.611118 -0.609424 -0.060023
+-0.540427 0.577008 -0.604193 -0.0997527
+-0.577008 0.540427 -0.596375 -0.139055
+-0.611118 0.501532 -0.586004 -0.177762
+-0.642612 0.460489 -0.573123 -0.215708
+-0.671353 0.417474 -0.557788 -0.25273
+-0.69722 0.372672 -0.540064 -0.28867
+-0.720101 0.326274 -0.520028 -0.323374
+-0.739899 0.278478 -0.497765 -0.356693
+-0.756528 0.22949 -0.47337 -0.388485
+-0.769917 0.179519 -0.446949 -0.418613
+-0.78001 0.12878 -0.418613 -0.446949
+-0.786763 0.0774893 -0.388485 -0.47337
+-0.790146 0.0258668 -0.356693 -0.497765
+0.816059 0.026715 0.255355 0.51781
+0.812565 0.0800307 0.220942 0.533402
+0.805591 0.133004 0.185583 0.54671
+0.795167 0.185407 0.149429 0.557678
+0.781339 0.237016 0.112635 0.566257
+0.764164 0.287611 0.0753593 0.572411
+0.743717 0.336974 0.0377605 0.576114
+0.720086 0.384894 -1.21881e-08 0.57735
+0.693371 0.431166 -0.0377605 0.576114
+0.663687 0.475591 -0.0753593 0.572411
+0.63116 0.51798 -0.112635 0.566257
+0.595932 0.558151 -0.149429 0.557678
+0.558151 0.595932 -0.185583 0.54671
+0.51798 0.63116 -0.220942 0.533402
+0.475591 0.663687 -0.255356 0.51781
+0.431166 0.693371 -0.288675 0.5
+0.384894 0.720086 -0.320759 0.480049
+0.336974 0.743717 -0.351469 0.458043
+0.287611 0.764164 -0.380674 0.434075
+0.237016 0.781339 -0.408248 0.408248
+0.185407 0.795167 -0.434075 0.380673
+0.133003 0.805591 -0.458043 0.351469
+0.0800306 0.812565 -0.480049 0.320759
+0.0267149 0.816059 -0.5 0.288675
+-0.0267151 0.816059 -0.51781 0.255355
+-0.0800307 0.812565 -0.533402 0.220942
+-0.133004 0.805591 -0.54671 0.185583
+-0.185407 0.795167 -0.557678 0.149429
+-0.237017 0.781338 -0.566257 0.112635
+-0.287611 0.764164 -0.572411 0.0753592
+-0.336974 0.743717 -0.576114 0.0377604
+-0.384894 0.720086 -0.57735 6.58134e-08
+-0.431166 0.693371 -0.576114 -0.0377605
+-0.475591 0.663686 -0.572411 -0.0753594
+-0.51798 0.63116 -0.566257 -0.112635
+-0.558151 0.595931 -0.557678 -0.149429
+-0.595931 0.558151 -0.54671 -0.185583
+-0.63116 0.51798 -0.533402 -0.220942
+-0.663686 0.475591 -0.51781 -0.255355
+-0.693371 0.431166 -0.5 -0.288675
+-0.720086 0.384894 -0.480049 -0.320759
+-0.743717 0.336974 -0.458043 -0.351469
+-0.764164 0.287611 -0.434075 -0.380674
+-0.781339 0.237016 -0.408248 -0.408248
+-0.795167 0.185407 -0.380673 -0.434075
+-0.805591 0.133004 -0.351469 -0.458043
+-0.812565 0.0800306 -0.320759 -0.480049
+-0.816059 0.0267151 -0.288675 -0.5
+0.816059 0.026715 0.351469 0.458043
+0.812565 0.0800307 0.320759 0.480049
+0.805591 0.133004 0.288675 0.5
+0.795167 0.185407 0.255355 0.51781
+0.781339 0.237016 0.220942 0.533402
+0.764164 0.287611 0.185583 0.54671
+0.743717 0.336974 0.149429 0.557678
+0.720086 0.384894 0.112635 0.566257
+0.693371 0.431166 0.0753593 0.572411
+0.663687 0.475591 0.0377605 0.576114
+0.63116 0.51798 1.04669e-08 0.57735
+0.595932 0.558151 -0.0377605 0.576114
+0.558151 0.595932 -0.0753593 0.572411
+0.51798 0.63116 -0.112635 0.566257
+0.475591 0.663687 -0.149429 0.557678
+0.431166 0.693371 -0.185583 0.54671
+0.384894 0.720086 -0.220942 0.533402
+0.336974 0.743717 -0.255356 0.51781
+0.287611 0.764164 -0.288675 0.5
+0.237016 0.781339 -0.320759 0.480049
+0.185407 0.795167 -0.351469 0.458043
+0.133003 0.805591 -0.380674 0.434075
+0.0800306 0.812565 -0.408248 0.408248
+0.0267149 0.816059 -0.434075 0.380673
+-0.0267151 0.816059 -0.458043 0.351469
+-0.0800307 0.812565 -0.480049 0.320759
+-0.133004 0.805591 -0.5 0.288675
+-0.185407 0.795167 -0.51781 0.255355
+-0.237017 0.781338 -0.533402 0.220942
+-0.287611 0.764164 -0.54671 0.185583
+-0.336974 0.743717 -0.557678 0.149429
+-0.384894 0.720086 -0.566257 0.112636
+-0.431166 0.693371 -0.572411 0.0753593
+-0.475591 0.663686 -0.576114 0.0377605
+-0.51798 0.63116 -0.57735 1.96429e-08
+-0.558151 0.595931 -0.576114 -0.0377606
+-0.595931 0.558151 -0.572411 -0.0753593
+-0.63116 0.51798 -0.566257 -0.112635
+-0.663686 0.475591 -0.557678 -0.149429
+-0.693371 0.431166 -0.54671 -0.185583
+-0.720086 0.384894 -0.533402 -0.220942
+-0.743717 0.336974 -0.51781 -0.255355
+-0.764164 0.287611 -0.5 -0.288675
+-0.781339 0.237016 -0.480049 -0.320759
+-0.795167 0.185407 -0.458043 -0.351469
+-0.805591 0.133004 -0.434075 -0.380673
+-0.812565 0.0800306 -0.408248 -0.408248
+-0.816059 0.0267151 -0.380674 -0.434075
+0.841175 0.0275372 0.285189 0.458622
+0.837573 0.0824937 0.254583 0.476292
+0.830384 0.137097 0.222887 0.491923
+0.81964 0.191113 0.190237 0.505447
+0.805385 0.244311 0.156772 0.516807
+0.787682 0.296463 0.122635 0.525954
+0.766606 0.347345 0.0879736 0.532848
+0.742247 0.396739 0.0529353 0.537461
+0.71471 0.444435 0.0176703 0.539773
+0.684112 0.490228 -0.0176703 0.539773
+0.650585 0.533921 -0.0529353 0.537461
+0.614272 0.575329 -0.0879736 0.532848
+0.575329 0.614272 -0.122635 0.525954
+0.533922 0.650585 -0.156772 0.516807
+0.490228 0.684112 -0.190237 0.505447
+0.444435 0.71471 -0.222887 0.491923
+0.396739 0.742247 -0.254583 0.476292
+0.347345 0.766606 -0.285189 0.458622
+0.296463 0.787682 -0.314574 0.438987
+0.244311 0.805385 -0.342612 0.417473
+0.191113 0.81964 -0.369182 0.394172
+0.137097 0.830384 -0.394172 0.369182
+0.0824936 0.837573 -0.417473 0.342611
+0.0275371 0.841175 -0.438987 0.314574
+-0.0275373 0.841175 -0.458622 0.285189
+-0.0824938 0.837573 -0.476292 0.254583
+-0.137097 0.830384 -0.491923 0.222887
+-0.191113 0.81964 -0.505447 0.190237
+-0.244311 0.805385 -0.516807 0.156772
+-0.296463 0.787682 -0.525954 0.122635
+-0.347345 0.766606 -0.532848 0.0879735
+-0.396739 0.742247 -0.537461 0.0529354
+-0.444435 0.71471 -0.539773 0.0176703
+-0.490228 0.684112 -0.539773 -0.0176704
+-0.533921 0.650585 -0.537461 -0.0529353
+-0.575329 0.614272 -0.532848 -0.0879736
+-0.614272 0.575329 -0.525954 -0.122635
+-0.650585 0.533921 -0.516807 -0.156772
+-0.684112 0.490228 -0.505447 -0.190237
+-0.71471 0.444435 -0.491923 -0.222887
+-0.742247 0.39674 -0.476292 -0.254583
+-0.766606 0.347345 -0.458622 -0.285189
+-0.787682 0.296463 -0.438987 -0.314574
+-0.805385 0.244311 -0.417473 -0.342612
+-0.81964 0.191113 -0.394172 -0.369182
+-0.830384 0.137097 -0.369182 -0.394172
+-0.837573 0.0824937 -0.342612 -0.417473
+-0.841175 0.0275373 -0.314574 -0.438987
+0.790146 0.0258667 0.497765 0.356693
+0.786763 0.0774894 0.47337 0.388485
+0.78001 0.12878 0.446949 0.418613
+0.769917 0.17952 0.418613 0.446949
+0.756528 0.22949 0.388485 0.47337
+0.739899 0.278478 0.356693 0.497765
+0.720101 0.326274 0.323374 0.520028
+0.69722 0.372672 0.28867 0.540064
+0.671353 0.417474 0.25273 0.557788
+0.642612 0.460489 0.215708 0.573123
+0.611118 0.501532 0.177762 0.586004
+0.577008 0.540427 0.139055 0.596375
+0.540427 0.577008 0.0997527 0.604193
+0.501532 0.611118 0.060023 0.609424
+0.460489 0.642612 0.0200362 0.612045
+0.417474 0.671353 -0.0200363 0.612045
+0.372672 0.69722 -0.060023 0.609424
+0.326274 0.720101 -0.0997527 0.604193
+0.278478 0.739899 -0.139055 0.596375
+0.22949 0.756528 -0.177762 0.586004
+0.17952 0.769917 -0.215708 0.573123
+0.12878 0.78001 -0.25273 0.557788
+0.0774893 0.786763 -0.28867 0.540064
+0.0258666 0.790146 -0.323374 0.520028
+-0.0258668 0.790146 -0.356693 0.497765
+-0.0774894 0.786763 -0.388485 0.47337
+-0.12878 0.78001 -0.418613 0.446949
+-0.17952 0.769917 -0.446949 0.418613
+-0.22949 0.756528 -0.47337 0.388485
+-0.278478 0.739899 -0.497765 0.356693
+-0.326274 0.720101 -0.520028 0.323374
+-0.372672 0.69722 -0.540064 0.28867
+-0.417474 0.671353 -0.557788 0.25273
+-0.460489 0.642612 -0.573123 0.215708
+-0.501532 0.611118 -0.586004 0.177762
+-0.540427 0.577008 -0.596375 0.139055
+-0.577008 0.540427 -0.604193 0.0997527
+-0.611118 0.501532 -0.609424 0.060023
+-0.642612 0.460489 -0.612045 0.0200363
+-0.671353 0.417474 -0.612045 -0.0200363
+-0.69722 0.372672 -0.609424 -0.0600229
+-0.720101 0.326274 -0.604193 -0.0997527
+-0.739899 0.278478 -0.596375 -0.139055
+-0.756528 0.22949 -0.586004 -0.177762
+-0.769917 0.179519 -0.573123 -0.215708
+-0.78001 0.12878 -0.557788 -0.25273
+-0.786763 0.0774893 -0.540064 -0.28867
+-0.790146 0.0258668 -0.520028 -0.323374
+0.816059 0.026715 0.434075 0.380673
+0.812565 0.0800307 0.408248 0.408248
+0.805591 0.133004 0.380673 0.434075
+0.795167 0.185407 0.351469 0.458043
+0.781339 0.237016 0.320759 0.480049
+0.764164 0.287611 0.288675 0.5
+0.743717 0.336974 0.255355 0.51781
+0.720086 0.384894 0.220942 0.533402
+0.693371 0.431166 0.185583 0.54671
+0.663687 0.475591 0.149429 0.557678
+0.63116 0.51798 0.112635 0.566257
+0.595932 0.558151 0.0753593 0.572411
+0.558151 0.595932 0.0377605 0.576114
+0.51798 0.63116 -1.29091e-09 0.57735
+0.475591 0.663687 -0.0377605 0.576114
+0.431166 0.693371 -0.0753594 0.572411
+0.384894 0.720086 -0.112635 0.566257
+0.336974 0.743717 -0.149429 0.557678
+0.287611 0.764164 -0.185583 0.54671
+0.237016 0.781339 -0.220942 0.533402
+0.185407 0.795167 -0.255356 0.51781
+0.133003 0.805591 -0.288675 0.5
+0.0800306 0.812565 -0.320759 0.480049
+0.0267149 0.816059 -0.351469 0.458043
+-0.0267151 0.816059 -0.380674 0.434075
+-0.0800307 0.812565 -0.408248 0.408248
+-0.133004 0.805591 -0.434075 0.380673
+-0.185407 0.795167 -0.458043 0.351469
+-0.237017 0.781338 -0.480049 0.320759
+-0.287611 0.764164 -0.5 0.288675
+-0.336974 0.743717 -0.51781 0.255355
+-0.384894 0.720086 -0.533402 0.220942
+-0.431166 0.693371 -0.54671 0.185583
+-0.475591 0.663686 -0.557678 0.149429
+-0.51798 0.63116 -0.566257 0.112635
+-0.558151 0.595931 -0.572411 0.0753593
+-0.595931 0.558151 -0.576114 0.0377605
+-0.63116 0.51798 -0.57735 -2.65277e-08
+-0.663686 0.475591 -0.576114 -0.0377605
+-0.693371 0.431166 -0.572411 -0.0753593
+-0.720086 0.384894 -0.566257 -0.112635
+-0.743717 0.336974 -0.557678 -0.149429
+-0.764164 0.287611 -0.54671 -0.185583
+-0.781339 0.237016 -0.533402 -0.220942
+-0.795167 0.185407 -0.51781 -0.255356
+-0.805591 0.133004 -0.5 -0.288675
+-0.812565 0.0800306 -0.480049 -0.320759
+-0.816059 0.0267151 -0.458043 -0.351469
+0.816059 0.026715 0.5 0.288675
+0.812565 0.0800307 0.480049 0.320759
+0.805591 0.133004 0.458043 0.351469
+0.795167 0.185407 0.434075 0.380673
+0.781339 0.237016 0.408248 0.408248
+0.764164 0.287611 0.380673 0.434075
+0.743717 0.336974 0.351469 0.458043
+0.720086 0.384894 0.320759 0.480049
+0.693371 0.431166 0.288675 0.5
+0.663687 0.475591 0.255355 0.51781
+0.63116 0.51798 0.220942 0.533402
+0.595932 0.558151 0.185583 0.54671
+0.558151 0.595932 0.149429 0.557678
+0.51798 0.63116 0.112635 0.566257
+0.475591 0.663687 0.0753593 0.572411
+0.431166 0.693371 0.0377605 0.576114
+0.384894 0.720086 -4.74615e-08 0.57735
+0.336974 0.743717 -0.0377606 0.576114
+0.287611 0.764164 -0.0753594 0.572411
+0.237016 0.781339 -0.112635 0.566257
+0.185407 0.795167 -0.149429 0.557678
+0.133003 0.805591 -0.185583 0.54671
+0.0800306 0.812565 -0.220942 0.533402
+0.0267149 0.816059 -0.255356 0.51781
+-0.0267151 0.816059 -0.288675 0.5
+-0.0800307 0.812565 -0.320759 0.480049
+-0.133004 0.805591 -0.351469 0.458043
+-0.185407 0.795167 -0.380674 0.434075
+-0.237017 0.781338 -0.408248 0.408248
+-0.287611 0.764164 -0.434075 0.380673
+-0.336974 0.743717 -0.458043 0.351469
+-0.384894 0.720086 -0.480049 0.320759
+-0.431166 0.693371 -0.5 0.288675
+-0.475591 0.663686 -0.51781 0.255355
+-0.51798 0.63116 -0.533402 0.220942
+-0.558151 0.595931 -0.54671 0.185583
+-0.595931 0.558151 -0.557678 0.149429
+-0.63116 0.51798 -0.566257 0.112635
+-0.663686 0.475591 -0.572411 0.0753594
+-0.693371 0.431166 -0.576114 0.0377605
+-0.720086 0.384894 -0.57735 6.49528e-08
+-0.743717 0.336974 -0.576114 -0.0377605
+-0.764164 0.287611 -0.572411 -0.0753594
+-0.781339 0.237016 -0.566257 -0.112635
+-0.795167 0.185407 -0.557678 -0.149429
+-0.805591 0.133004 -0.54671 -0.185583
+-0.812565 0.0800306 -0.533402 -0.220942
+-0.816059 0.0267151 -0.51781 -0.255355
+0.841175 0.0275372 0.438987 0.314574
+0.837573 0.0824937 0.417473 0.342612
+0.830384 0.137097 0.394172 0.369182
+0.81964 0.191113 0.369182 0.394172
+0.805385 0.244311 0.342612 0.417473
+0.787682 0.296463 0.314574 0.438987
+0.766606 0.347345 0.285189 0.458622
+0.742247 0.396739 0.254583 0.476292
+0.71471 0.444435 0.222887 0.491923
+0.684112 0.490228 0.190237 0.505447
+0.650585 0.533921 0.156772 0.516807
+0.614272 0.575329 0.122635 0.525954
+0.575329 0.614272 0.0879736 0.532848
+0.533922 0.650585 0.0529353 0.537461
+0.490228 0.684112 0.0176703 0.539773
+0.444435 0.71471 -0.0176704 0.539773
+0.396739 0.742247 -0.0529354 0.537461
+0.347345 0.766606 -0.0879736 0.532848
+0.296463 0.787682 -0.122635 0.525954
+0.244311 0.805385 -0.156772 0.516807
+0.191113 0.81964 -0.190237 0.505447
+0.137097 0.830384 -0.222887 0.491923
+0.0824936 0.837573 -0.254583 0.476292
+0.0275371 0.841175 -0.285189 0.458622
+-0.0275373 0.841175 -0.314574 0.438987
+-0.0824938 0.837573 -0.342612 0.417473
+-0.137097 0.830384 -0.369182 0.394172
+-0.191113 0.81964 -0.394172 0.369182
+-0.244311 0.805385 -0.417473 0.342611
+-0.296463 0.787682 -0.438987 0.314574
+-0.347345 0.766606 -0.458622 0.285189
+-0.396739 0.742247 -0.476292 0.254583
+-0.444435 0.71471 -0.491923 0.222887
+-0.490228 0.684112 -0.505447 0.190237
+-0.533921 0.650585 -0.516807 0.156772
+-0.575329 0.614272 -0.525954 0.122635
+-0.614272 0.575329 -0.532848 0.0879736
+-0.650585 0.533921 -0.537461 0.0529353
+-0.684112 0.490228 -0.539773 0.0176704
+-0.71471 0.444435 -0.539773 -0.0176703
+-0.742247 0.39674 -0.537461 -0.0529352
+-0.766606 0.347345 -0.532848 -0.0879736
+-0.787682 0.296463 -0.525954 -0.122635
+-0.805385 0.244311 -0.516807 -0.156772
+-0.81964 0.191113 -0.505447 -0.190237
+-0.830384 0.137097 -0.491923 -0.222887
+-0.837573 0.0824937 -0.476292 -0.254583
+-0.841175 0.0275373 -0.458622 -0.285189
+0.841175 0.0275372 0.369182 0.394172
+0.837573 0.0824937 0.342612 0.417473
+0.830384 0.137097 0.314574 0.438987
+0.81964 0.191113 0.285189 0.458622
+0.805385 0.244311 0.254583 0.476292
+0.787682 0.296463 0.222887 0.491923
+0.766606 0.347345 0.190237 0.505447
+0.742247 0.396739 0.156772 0.516807
+0.71471 0.444435 0.122635 0.525954
+0.684112 0.490228 0.0879736 0.532848
+0.650585 0.533921 0.0529353 0.537461
+0.614272 0.575329 0.0176703 0.539773
+0.575329 0.614272 -0.0176703 0.539773
+0.533922 0.650585 -0.0529353 0.537461
+0.490228 0.684112 -0.0879736 0.532848
+0.444435 0.71471 -0.122635 0.525954
+0.396739 0.742247 -0.156772 0.516807
+0.347345 0.766606 -0.190237 0.505447
+0.296463 0.787682 -0.222887 0.491923
+0.244311 0.805385 -0.254583 0.476292
+0.191113 0.81964 -0.285189 0.458622
+0.137097 0.830384 -0.314574 0.438987
+0.0824936 0.837573 -0.342612 0.417473
+0.0275371 0.841175 -0.369182 0.394172
+-0.0275373 0.841175 -0.394172 0.369182
+-0.0824938 0.837573 -0.417473 0.342611
+-0.137097 0.830384 -0.438987 0.314574
+-0.191113 0.81964 -0.458622 0.285189
+-0.244311 0.805385 -0.476292 0.254583
+-0.296463 0.787682 -0.491923 0.222887
+-0.347345 0.766606 -0.505447 0.190237
+-0.396739 0.742247 -0.516807 0.156772
+-0.444435 0.71471 -0.525954 0.122635
+-0.490228 0.684112 -0.532848 0.0879736
+-0.533921 0.650585 -0.537461 0.0529353
+-0.575329 0.614272 -0.539773 0.0176703
+-0.614272 0.575329 -0.539773 -0.0176703
+-0.650585 0.533921 -0.537461 -0.0529353
+-0.684112 0.490228 -0.532848 -0.0879736
+-0.71471 0.444435 -0.525954 -0.122635
+-0.742247 0.39674 -0.516807 -0.156772
+-0.766606 0.347345 -0.505447 -0.190237
+-0.787682 0.296463 -0.491923 -0.222887
+-0.805385 0.244311 -0.476292 -0.254583
+-0.81964 0.191113 -0.458622 -0.285189
+-0.830384 0.137097 -0.438987 -0.314574
+-0.837573 0.0824937 -0.417473 -0.342612
+-0.841175 0.0275373 -0.394172 -0.369182
+0.865562 0.0283356 0.304381 0.396677
+0.861855 0.0848853 0.277785 0.415735
+0.854458 0.141072 0.25 0.433013
+0.843402 0.196654 0.221144 0.448436
+0.828735 0.251394 0.191342 0.46194
+0.810518 0.305057 0.16072 0.473465
+0.788831 0.357415 0.12941 0.482963
+0.763766 0.408242 0.0975452 0.490393
+0.735431 0.45732 0.0652631 0.495722
+0.703946 0.50444 0.0327016 0.498929
+0.669447 0.549401 9.06458e-09 0.5
+0.632081 0.592008 -0.0327016 0.498929
+0.592008 0.632081 -0.0652631 0.495722
+0.549401 0.669447 -0.0975452 0.490393
+0.50444 0.703946 -0.12941 0.482963
+0.45732 0.735431 -0.16072 0.473465
+0.408241 0.763766 -0.191342 0.46194
+0.357415 0.788831 -0.221144 0.448436
+0.305057 0.810518 -0.25 0.433013
+0.251394 0.828735 -0.277785 0.415735
+0.196654 0.843402 -0.304381 0.396677
+0.141072 0.854458 -0.329673 0.37592
+0.0848852 0.861855 -0.353553 0.353553
+0.0283355 0.865562 -0.37592 0.329673
+-0.0283356 0.865562 -0.396677 0.304381
+-0.0848854 0.861855 -0.415735 0.277785
+-0.141072 0.854458 -0.433013 0.25
+-0.196654 0.843402 -0.448436 0.221144
+-0.251394 0.828735 -0.46194 0.191342
+-0.305058 0.810518 -0.473465 0.16072
+-0.357415 0.788831 -0.482963 0.129409
+-0.408241 0.763766 -0.490393 0.0975452
+-0.45732 0.735431 -0.495722 0.0652631
+-0.504441 0.703946 -0.498929 0.0327015
+-0.549401 0.669447 -0.5 1.70112e-08
+-0.592008 0.632081 -0.498929 -0.0327016
+-0.632081 0.592008 -0.495722 -0.0652631
+-0.669447 0.549401 -0.490393 -0.0975452
+-0.703946 0.504441 -0.482963 -0.129409
+-0.735431 0.45732 -0.473465 -0.16072
+-0.763766 0.408242 -0.46194 -0.191342
+-0.788831 0.357415 -0.448436 -0.221144
+-0.810518 0.305057 -0.433013 -0.25
+-0.828735 0.251394 -0.415735 -0.277785
+-0.843402 0.196654 -0.396677 -0.304381
+-0.854458 0.141072 -0.37592 -0.329673
+-0.861855 0.0848853 -0.353553 -0.353553
+-0.865562 0.0283356 -0.329673 -0.37592
+0.865562 0.0283356 0.37592 0.329673
+0.861855 0.0848853 0.353553 0.353553
+0.854458 0.141072 0.329673 0.37592
+0.843402 0.196654 0.304381 0.396677
+0.828735 0.251394 0.277785 0.415735
+0.810518 0.305057 0.25 0.433013
+0.788831 0.357415 0.221144 0.448436
+0.763766 0.408242 0.191342 0.46194
+0.735431 0.45732 0.16072 0.473465
+0.703946 0.50444 0.12941 0.482963
+0.669447 0.549401 0.0975452 0.490393
+0.632081 0.592008 0.0652631 0.495722
+0.592008 0.632081 0.0327016 0.498929
+0.549401 0.669447 -1.11796e-09 0.5
+0.50444 0.703946 -0.0327016 0.498929
+0.45732 0.735431 -0.0652631 0.495722
+0.408241 0.763766 -0.0975452 0.490393
+0.357415 0.788831 -0.12941 0.482963
+0.305057 0.810518 -0.16072 0.473465
+0.251394 0.828735 -0.191342 0.46194
+0.196654 0.843402 -0.221144 0.448436
+0.141072 0.854458 -0.25 0.433013
+0.0848852 0.861855 -0.277785 0.415735
+0.0283355 0.865562 -0.304381 0.396677
+-0.0283356 0.865562 -0.329673 0.37592
+-0.0848854 0.861855 -0.353553 0.353553
+-0.141072 0.854458 -0.37592 0.329673
+-0.196654 0.843402 -0.396677 0.304381
+-0.251394 0.828735 -0.415735 0.277785
+-0.305058 0.810518 -0.433013 0.25
+-0.357415 0.788831 -0.448436 0.221144
+-0.408241 0.763766 -0.46194 0.191342
+-0.45732 0.735431 -0.473465 0.16072
+-0.504441 0.703946 -0.482963 0.129409
+-0.549401 0.669447 -0.490393 0.0975452
+-0.592008 0.632081 -0.495722 0.0652631
+-0.632081 0.592008 -0.498929 0.0327016
+-0.669447 0.549401 -0.5 -2.29737e-08
+-0.703946 0.504441 -0.498929 -0.0327015
+-0.735431 0.45732 -0.495722 -0.0652631
+-0.763766 0.408242 -0.490393 -0.0975451
+-0.788831 0.357415 -0.482963 -0.12941
+-0.810518 0.305057 -0.473465 -0.16072
+-0.828735 0.251394 -0.46194 -0.191342
+-0.843402 0.196654 -0.448436 -0.221144
+-0.854458 0.141072 -0.433013 -0.25
+-0.861855 0.0848853 -0.415735 -0.277785
+-0.865562 0.0283356 -0.396677 -0.304381
+0.88928 0.029112 0.312016 0.333136
+0.885472 0.0872114 0.28956 0.352829
+0.877872 0.144937 0.265863 0.371012
+0.866513 0.202043 0.241029 0.387606
+0.851444 0.258283 0.215162 0.40254
+0.832728 0.313417 0.188374 0.415751
+0.810447 0.367209 0.160779 0.427181
+0.784695 0.419428 0.132496 0.436782
+0.755583 0.469852 0.103646 0.444512
+0.723236 0.518263 0.0743513 0.450339
+0.687791 0.564456 0.0447385 0.454238
+0.649401 0.608231 0.0149341 0.456191
+0.608231 0.649401 -0.0149342 0.456191
+0.564456 0.687791 -0.0447385 0.454238
+0.518263 0.723236 -0.0743513 0.450339
+0.469852 0.755583 -0.103646 0.444512
+0.419428 0.784695 -0.132496 0.436781
+0.367209 0.810447 -0.160779 0.427181
+0.313417 0.832728 -0.188374 0.415751
+0.258283 0.851444 -0.215162 0.40254
+0.202043 0.866513 -0.241029 0.387606
+0.144937 0.877872 -0.265864 0.371012
+0.0872113 0.885472 -0.28956 0.352829
+0.0291119 0.88928 -0.312016 0.333136
+-0.0291121 0.88928 -0.333136 0.312016
+-0.0872115 0.885472 -0.352829 0.28956
+-0.144937 0.877872 -0.371012 0.265863
+-0.202043 0.866513 -0.387606 0.241029
+-0.258283 0.851444 -0.40254 0.215162
+-0.313417 0.832728 -0.415751 0.188374
+-0.367209 0.810447 -0.427181 0.160779
+-0.419428 0.784695 -0.436781 0.132496
+-0.469852 0.755583 -0.444512 0.103646
+-0.518263 0.723236 -0.450339 0.0743512
+-0.564456 0.687791 -0.454238 0.0447385
+-0.608231 0.649401 -0.456191 0.0149341
+-0.649401 0.608231 -0.456191 -0.0149341
+-0.687791 0.564456 -0.454238 -0.0447385
+-0.723236 0.518263 -0.450339 -0.0743512
+-0.755583 0.469852 -0.444512 -0.103646
+-0.784695 0.419428 -0.436782 -0.132496
+-0.810447 0.367209 -0.427181 -0.160779
+-0.832728 0.313417 -0.415751 -0.188374
+-0.851444 0.258283 -0.40254 -0.215162
+-0.866513 0.202043 -0.387606 -0.241029
+-0.877872 0.144937 -0.371012 -0.265863
+-0.885472 0.0872113 -0.352829 -0.28956
+-0.88928 0.0291121 -0.333136 -0.312016
+0.841175 0.0275372 0.190237 0.505447
+0.837573 0.0824937 0.156772 0.516807
+0.830384 0.137097 0.122635 0.525954
+0.81964 0.191113 0.0879736 0.532848
+0.805385 0.244311 0.0529353 0.537461
+0.787682 0.296463 0.0176703 0.539773
+0.766606 0.347345 -0.0176703 0.539773
+0.742247 0.396739 -0.0529353 0.537461
+0.71471 0.444435 -0.0879736 0.532848
+0.684112 0.490228 -0.122635 0.525954
+0.650585 0.533921 -0.156772 0.516807
+0.614272 0.575329 -0.190237 0.505447
+0.575329 0.614272 -0.222887 0.491923
+0.533922 0.650585 -0.254583 0.476292
+0.490228 0.684112 -0.285189 0.458622
+0.444435 0.71471 -0.314574 0.438987
+0.396739 0.742247 -0.342612 0.417473
+0.347345 0.766606 -0.369182 0.394172
+0.296463 0.787682 -0.394172 0.369182
+0.244311 0.805385 -0.417473 0.342612
+0.191113 0.81964 -0.438987 0.314574
+0.137097 0.830384 -0.458622 0.285189
+0.0824936 0.837573 -0.476292 0.254583
+0.0275371 0.841175 -0.491923 0.222887
+-0.0275373 0.841175 -0.505447 0.190237
+-0.0824938 0.837573 -0.516807 0.156772
+-0.137097 0.830384 -0.525954 0.122635
+-0.191113 0.81964 -0.532848 0.0879736
+-0.244311 0.805385 -0.537461 0.0529352
+-0.296463 0.787682 -0.539773 0.0176702
+-0.347345 0.766606 -0.539773 -0.0176704
+-0.396739 0.742247 -0.537461 -0.0529352
+-0.444435 0.71471 -0.532848 -0.0879736
+-0.490228 0.684112 -0.525954 -0.122635
+-0.533921 0.650585 -0.516807 -0.156772
+-0.575329 0.614272 -0.505447 -0.190237
+-0.614272 0.575329 -0.491923 -0.222887
+-0.650585 0.533921 -0.476292 -0.254583
+-0.684112 0.490228 -0.458622 -0.285189
+-0.71471 0.444435 -0.438987 -0.314574
+-0.742247 0.39674 -0.417473 -0.342611
+-0.766606 0.347345 -0.394172 -0.369182
+-0.787682 0.296463 -0.369182 -0.394172
+-0.805385 0.244311 -0.342612 -0.417473
+-0.81964 0.191113 -0.314574 -0.438987
+-0.830384 0.137097 -0.285189 -0.458622
+-0.837573 0.0824937 -0.254583 -0.476292
+-0.841175 0.0275373 -0.222887 -0.491923
+0.865562 0.0283356 0.12941 0.482963
+0.861855 0.0848853 0.0975452 0.490393
+0.854458 0.141072 0.0652631 0.495722
+0.843402 0.196654 0.0327016 0.498929
+0.828735 0.251394 -3.72653e-10 0.5
+0.810518 0.305057 -0.0327016 0.498929
+0.788831 0.357415 -0.0652631 0.495722
+0.763766 0.408242 -0.0975452 0.490393
+0.735431 0.45732 -0.12941 0.482963
+0.703946 0.50444 -0.16072 0.473465
+0.669447 0.549401 -0.191342 0.46194
+0.632081 0.592008 -0.221144 0.448436
+0.592008 0.632081 -0.25 0.433013
+0.549401 0.669447 -0.277785 0.415735
+0.50444 0.703946 -0.304381 0.396677
+0.45732 0.735431 -0.329673 0.37592
+0.408241 0.763766 -0.353553 0.353553
+0.357415 0.788831 -0.37592 0.329673
+0.305057 0.810518 -0.396677 0.304381
+0.251394 0.828735 -0.415735 0.277785
+0.196654 0.843402 -0.433013 0.25
+0.141072 0.854458 -0.448436 0.221144
+0.0848852 0.861855 -0.46194 0.191342
+0.0283355 0.865562 -0.473465 0.16072
+-0.0283356 0.865562 -0.482963 0.129409
+-0.0848854 0.861855 -0.490393 0.0975451
+-0.141072 0.854458 -0.495722 0.0652631
+-0.196654 0.843402 -0.498929 0.0327015
+-0.251394 0.828735 -0.5 -8.1833e-08
+-0.305058 0.810518 -0.498929 -0.0327016
+-0.357415 0.788831 -0.495722 -0.0652632
+-0.408241 0.763766 -0.490393 -0.0975451
+-0.45732 0.735431 -0.482963 -0.12941
+-0.504441 0.703946 -0.473465 -0.16072
+-0.549401 0.669447 -0.46194 -0.191342
+-0.592008 0.632081 -0.448436 -0.221144
+-0.632081 0.592008 -0.433013 -0.25
+-0.669447 0.549401 -0.415735 -0.277785
+-0.703946 0.504441 -0.396677 -0.304381
+-0.735431 0.45732 -0.37592 -0.329673
+-0.763766 0.408242 -0.353553 -0.353553
+-0.788831 0.357415 -0.329673 -0.37592
+-0.810518 0.305057 -0.304381 -0.396677
+-0.828735 0.251394 -0.277785 -0.415735
+-0.843402 0.196654 -0.25 -0.433013
+-0.854458 0.141072 -0.221144 -0.448436
+-0.861855 0.0848853 -0.191342 -0.46194
+-0.865562 0.0283356 -0.16072 -0.473465
+0.865562 0.0283356 0.221144 0.448436
+0.861855 0.0848853 0.191342 0.46194
+0.854458 0.141072 0.16072 0.473465
+0.843402 0.196654 0.12941 0.482963
+0.828735 0.251394 0.0975452 0.490393
+0.810518 0.305057 0.0652631 0.495722
+0.788831 0.357415 0.0327016 0.498929
+0.763766 0.408242 -1.05552e-08 0.5
+0.735431 0.45732 -0.0327016 0.498929
+0.703946 0.50444 -0.0652631 0.495722
+0.669447 0.549401 -0.0975452 0.490393
+0.632081 0.592008 -0.12941 0.482963
+0.592008 0.632081 -0.16072 0.473465
+0.549401 0.669447 -0.191342 0.46194
+0.50444 0.703946 -0.221144 0.448436
+0.45732 0.735431 -0.25 0.433013
+0.408241 0.763766 -0.277785 0.415735
+0.357415 0.788831 -0.304381 0.396677
+0.305057 0.810518 -0.329673 0.37592
+0.251394 0.828735 -0.353553 0.353553
+0.196654 0.843402 -0.37592 0.329673
+0.141072 0.854458 -0.396677 0.304381
+0.0848852 0.861855 -0.415735 0.277785
+0.0283355 0.865562 -0.433013 0.25
+-0.0283356 0.865562 -0.448436 0.221144
+-0.0848854 0.861855 -0.46194 0.191342
+-0.141072 0.854458 -0.473465 0.16072
+-0.196654 0.843402 -0.482963 0.129409
+-0.251394 0.828735 -0.490393 0.0975451
+-0.305058 0.810518 -0.495722 0.065263
+-0.357415 0.788831 -0.498929 0.0327015
+-0.408241 0.763766 -0.5 5.69961e-08
+-0.45732 0.735431 -0.498929 -0.0327016
+-0.504441 0.703946 -0.495722 -0.0652631
+-0.549401 0.669447 -0.490393 -0.0975451
+-0.592008 0.632081 -0.482963 -0.12941
+-0.632081 0.592008 -0.473465 -0.16072
+-0.669447 0.549401 -0.46194 -0.191342
+-0.703946 0.504441 -0.448436 -0.221144
+-0.735431 0.45732 -0.433013 -0.25
+-0.763766 0.408242 -0.415735 -0.277785
+-0.788831 0.357415 -0.396677 -0.304381
+-0.810518 0.305057 -0.37592 -0.329673
+-0.828735 0.251394 -0.353553 -0.353553
+-0.843402 0.196654 -0.329673 -0.37592
+-0.854458 0.141072 -0.304381 -0.396677
+-0.861855 0.0848853 -0.277785 -0.415735
+-0.865562 0.0283356 -0.25 -0.433013
+0.88928 0.029112 0.160779 0.427181
+0.885472 0.0872114 0.132496 0.436782
+0.877872 0.144937 0.103646 0.444512
+0.866513 0.202043 0.0743513 0.450339
+0.851444 0.258283 0.0447385 0.454238
+0.832728 0.313417 0.0149342 0.456191
+0.810447 0.367209 -0.0149342 0.456191
+0.784695 0.419428 -0.0447385 0.454238
+0.755583 0.469852 -0.0743513 0.450339
+0.723236 0.518263 -0.103646 0.444512
+0.687791 0.564456 -0.132496 0.436782
+0.649401 0.608231 -0.160779 0.427181
+0.608231 0.649401 -0.188374 0.415751
+0.564456 0.687791 -0.215162 0.40254
+0.518263 0.723236 -0.241029 0.387606
+0.469852 0.755583 -0.265864 0.371012
+0.419428 0.784695 -0.28956 0.352829
+0.367209 0.810447 -0.312016 0.333136
+0.313417 0.832728 -0.333136 0.312016
+0.258283 0.851444 -0.352829 0.28956
+0.202043 0.866513 -0.371012 0.265863
+0.144937 0.877872 -0.387606 0.241029
+0.0872113 0.885472 -0.40254 0.215162
+0.0291119 0.88928 -0.415751 0.188374
+-0.0291121 0.88928 -0.427181 0.160779
+-0.0872115 0.885472 -0.436782 0.132496
+-0.144937 0.877872 -0.444512 0.103646
+-0.202043 0.866513 -0.450339 0.0743512
+-0.258283 0.851444 -0.454238 0.0447384
+-0.313417 0.832728 -0.456191 0.0149341
+-0.367209 0.810447 -0.456191 -0.0149342
+-0.419428 0.784695 -0.454238 -0.0447384
+-0.469852 0.755583 -0.450339 -0.0743513
+-0.518263 0.723236 -0.444512 -0.103646
+-0.564456 0.687791 -0.436782 -0.132496
+-0.608231 0.649401 -0.427181 -0.160779
+-0.649401 0.608231 -0.415751 -0.188374
+-0.687791 0.564456 -0.40254 -0.215162
+-0.723236 0.518263 -0.387606 -0.241029
+-0.755583 0.469852 -0.371012 -0.265863
+-0.784695 0.419428 -0.352829 -0.28956
+-0.810447 0.367209 -0.333136 -0.312016
+-0.832728 0.313417 -0.312016 -0.333136
+-0.851444 0.258283 -0.28956 -0.352829
+-0.866513 0.202043 -0.265863 -0.371012
+-0.877872 0.144937 -0.241029 -0.387606
+-0.885472 0.0872113 -0.215162 -0.40254
+-0.88928 0.0291121 -0.188374 -0.415751
+0.88928 0.029112 0.0743513 0.450339
+0.885472 0.0872114 0.0447385 0.454238
+0.877872 0.144937 0.0149342 0.456191
+0.866513 0.202043 -0.0149342 0.456191
+0.851444 0.258283 -0.0447385 0.454238
+0.832728 0.313417 -0.0743513 0.450339
+0.810447 0.367209 -0.103646 0.444512
+0.784695 0.419428 -0.132496 0.436782
+0.755583 0.469852 -0.160779 0.427181
+0.723236 0.518263 -0.188374 0.415751
+0.687791 0.564456 -0.215162 0.40254
+0.649401 0.608231 -0.241029 0.387606
+0.608231 0.649401 -0.265863 0.371012
+0.564456 0.687791 -0.28956 0.352829
+0.518263 0.723236 -0.312016 0.333136
+0.469852 0.755583 -0.333136 0.312016
+0.419428 0.784695 -0.352829 0.28956
+0.367209 0.810447 -0.371012 0.265863
+0.313417 0.832728 -0.387606 0.241029
+0.258283 0.851444 -0.40254 0.215162
+0.202043 0.866513 -0.415751 0.188374
+0.144937 0.877872 -0.427181 0.160779
+0.0872113 0.885472 -0.436782 0.132496
+0.0291119 0.88928 -0.444512 0.103646
+-0.0291121 0.88928 -0.450339 0.0743512
+-0.0872115 0.885472 -0.454238 0.0447385
+-0.144937 0.877872 -0.456191 0.0149341
+-0.202043 0.866513 -0.456191 -0.0149342
+-0.258283 0.851444 -0.454238 -0.0447386
+-0.313417 0.832728 -0.450339 -0.0743513
+-0.367209 0.810447 -0.444512 -0.103646
+-0.419428 0.784695 -0.436782 -0.132496
+-0.469852 0.755583 -0.427181 -0.160779
+-0.518263 0.723236 -0.415751 -0.188374
+-0.564456 0.687791 -0.40254 -0.215162
+-0.608231 0.649401 -0.387606 -0.241029
+-0.649401 0.608231 -0.371012 -0.265863
+-0.687791 0.564456 -0.352829 -0.28956
+-0.723236 0.518263 -0.333136 -0.312016
+-0.755583 0.469852 -0.312016 -0.333136
+-0.784695 0.419428 -0.28956 -0.352829
+-0.810447 0.367209 -0.265863 -0.371012
+-0.832728 0.313417 -0.241029 -0.387606
+-0.851444 0.258283 -0.215162 -0.40254
+-0.866513 0.202043 -0.188374 -0.415751
+-0.877872 0.144937 -0.160779 -0.427181
+-0.885472 0.0872113 -0.132496 -0.436782
+-0.88928 0.0291121 -0.103646 -0.444512
+0.912382 0.0298683 0.0267007 0.407374
+0.908475 0.089477 -1.11532e-09 0.408248
+0.900678 0.148703 -0.0267007 0.407374
+0.889024 0.207291 -0.0532871 0.404756
+0.873563 0.264992 -0.0796453 0.400404
+0.854361 0.321559 -0.105662 0.394338
+0.831501 0.376748 -0.131227 0.386583
+0.80508 0.430324 -0.15623 0.377172
+0.775212 0.482058 -0.180564 0.366147
+0.742024 0.531727 -0.204124 0.353553
+0.705659 0.579119 -0.226811 0.339446
+0.666272 0.624032 -0.248526 0.323885
+0.624032 0.666272 -0.269177 0.306937
+0.579119 0.705659 -0.288675 0.288675
+0.531727 0.742024 -0.306937 0.269177
+0.482058 0.775212 -0.323885 0.248526
+0.430324 0.80508 -0.339446 0.226811
+0.376748 0.831501 -0.353553 0.204124
+0.321559 0.854361 -0.366147 0.180564
+0.264992 0.873563 -0.377172 0.15623
+0.207291 0.889024 -0.386583 0.131227
+0.148702 0.900678 -0.394338 0.105662
+0.0894769 0.908475 -0.400404 0.0796452
+0.0298682 0.912382 -0.404756 0.0532871
+-0.0298684 0.912382 -0.407374 0.0267007
+-0.0894771 0.908475 -0.408248 -3.41689e-08
+-0.148703 0.900678 -0.407374 -0.0267007
+-0.207291 0.889024 -0.404756 -0.0532871
+-0.264993 0.873563 -0.400404 -0.0796454
+-0.321559 0.854361 -0.394338 -0.105662
+-0.376748 0.831501 -0.386583 -0.131227
+-0.430324 0.80508 -0.377172 -0.15623
+-0.482058 0.775212 -0.366147 -0.180564
+-0.531727 0.742024 -0.353553 -0.204124
+-0.579119 0.705659 -0.339446 -0.226811
+-0.624032 0.666272 -0.323885 -0.248526
+-0.666272 0.624032 -0.306937 -0.269177
+-0.705659 0.579119 -0.288675 -0.288675
+-0.742024 0.531727 -0.269177 -0.306937
+-0.775212 0.482058 -0.248526 -0.323885
+-0.80508 0.430324 -0.226811 -0.339446
+-0.831501 0.376748 -0.204124 -0.353553
+-0.854361 0.321559 -0.180564 -0.366147
+-0.873563 0.264992 -0.15623 -0.377172
+-0.889024 0.207291 -0.131227 -0.386583
+-0.900678 0.148703 -0.105662 -0.394338
+-0.908475 0.089477 -0.0796453 -0.400404
+-0.912382 0.0298684 -0.0532871 -0.404756
+0.912382 0.0298683 0.105662 0.394338
+0.908475 0.089477 0.0796453 0.400404
+0.900678 0.148703 0.0532871 0.404756
+0.889024 0.207291 0.0267007 0.407374
+0.873563 0.264992 -3.0427e-10 0.408248
+0.854361 0.321559 -0.0267007 0.407374
+0.831501 0.376748 -0.0532871 0.404756
+0.80508 0.430324 -0.0796453 0.400404
+0.775212 0.482058 -0.105662 0.394338
+0.742024 0.531727 -0.131227 0.386583
+0.705659 0.579119 -0.15623 0.377172
+0.666272 0.624032 -0.180564 0.366147
+0.624032 0.666272 -0.204124 0.353553
+0.579119 0.705659 -0.226811 0.339446
+0.531727 0.742024 -0.248526 0.323885
+0.482058 0.775212 -0.269177 0.306937
+0.430324 0.80508 -0.288675 0.288675
+0.376748 0.831501 -0.306937 0.269177
+0.321559 0.854361 -0.323885 0.248526
+0.264992 0.873563 -0.339446 0.226811
+0.207291 0.889024 -0.353553 0.204124
+0.148702 0.900678 -0.366147 0.180564
+0.0894769 0.908475 -0.377172 0.15623
+0.0298682 0.912382 -0.386583 0.131227
+-0.0298684 0.912382 -0.394338 0.105662
+-0.0894771 0.908475 -0.400404 0.0796453
+-0.148703 0.900678 -0.404756 0.0532871
+-0.207291 0.889024 -0.407374 0.0267007
+-0.264993 0.873563 -0.408248 -6.68164e-08
+-0.321559 0.854361 -0.407374 -0.0267008
+-0.376748 0.831501 -0.404756 -0.0532872
+-0.430324 0.80508 -0.400404 -0.0796452
+-0.482058 0.775212 -0.394338 -0.105662
+-0.531727 0.742024 -0.386583 -0.131227
+-0.579119 0.705659 -0.377172 -0.15623
+-0.624032 0.666272 -0.366147 -0.180564
+-0.666272 0.624032 -0.353553 -0.204124
+-0.705659 0.579119 -0.339446 -0.226811
+-0.742024 0.531727 -0.323885 -0.248526
+-0.775212 0.482058 -0.306937 -0.269177
+-0.80508 0.430324 -0.288675 -0.288675
+-0.831501 0.376748 -0.269177 -0.306937
+-0.854361 0.321559 -0.248526 -0.323885
+-0.873563 0.264992 -0.226811 -0.339446
+-0.889024 0.207291 -0.204124 -0.353553
+-0.900678 0.148703 -0.180564 -0.366147
+-0.908475 0.089477 -0.15623 -0.377172
+-0.912382 0.0298684 -0.131227 -0.386583
+0.933521 0.0305603 0.0283599 0.35609
+0.929524 0.0915501 0.0050098 0.357182
+0.921546 0.152148 -0.0183618 0.356745
+0.909622 0.212094 -0.0416547 0.35478
+0.893803 0.271132 -0.0647692 0.351296
+0.874156 0.329009 -0.0876064 0.346308
+0.850766 0.385477 -0.110069 0.339837
+0.823733 0.440295 -0.132059 0.33191
+0.793173 0.493227 -0.153484 0.322563
+0.759216 0.544047 -0.174252 0.311834
+0.722008 0.592537 -0.194274 0.299769
+0.681709 0.63849 -0.213464 0.286421
+0.63849 0.681709 -0.23174 0.271847
+0.592537 0.722008 -0.249023 0.256108
+0.544047 0.759216 -0.265241 0.239273
+0.493227 0.793173 -0.280322 0.221413
+0.440295 0.823733 -0.294203 0.202605
+0.385477 0.850766 -0.306824 0.18293
+0.329009 0.874156 -0.318131 0.162471
+0.271132 0.893803 -0.328076 0.141316
+0.212094 0.909622 -0.336616 0.119556
+0.152148 0.921546 -0.343715 0.0972846
+0.09155 0.929524 -0.349342 0.0745963
+0.0305602 0.933521 -0.353472 0.0515885
+-0.0305604 0.933521 -0.35609 0.0283599
+-0.0915502 0.929524 -0.357182 0.00500977
+-0.152148 0.921546 -0.356745 -0.0183618
+-0.212094 0.909622 -0.35478 -0.0416547
+-0.271132 0.893803 -0.351296 -0.0647693
+-0.329009 0.874156 -0.346308 -0.0876065
+-0.385477 0.850766 -0.339837 -0.110069
+-0.440295 0.823733 -0.33191 -0.132059
+-0.493227 0.793173 -0.322563 -0.153484
+-0.544047 0.759216 -0.311834 -0.174252
+-0.592537 0.722008 -0.299769 -0.194274
+-0.63849 0.681709 -0.286421 -0.213464
+-0.681709 0.63849 -0.271847 -0.23174
+-0.722008 0.592537 -0.256108 -0.249023
+-0.759216 0.544047 -0.239273 -0.265241
+-0.793173 0.493227 -0.221413 -0.280322
+-0.823733 0.440295 -0.202605 -0.294203
+-0.850766 0.385477 -0.18293 -0.306824
+-0.874156 0.329009 -0.162471 -0.318131
+-0.893803 0.271132 -0.141316 -0.328076
+-0.909622 0.212094 -0.119556 -0.336616
+-0.921546 0.152148 -0.0972846 -0.343715
+-0.929524 0.0915501 -0.0745963 -0.349342
+-0.933521 0.0305604 -0.0515886 -0.353472
+0.88928 0.029112 0.241029 0.387606
+0.885472 0.0872114 0.215162 0.40254
+0.877872 0.144937 0.188374 0.415751
+0.866513 0.202043 0.160779 0.427181
+0.851444 0.258283 0.132496 0.436782
+0.832728 0.313417 0.103646 0.444512
+0.810447 0.367209 0.0743513 0.450339
+0.784695 0.419428 0.0447385 0.454238
+0.755583 0.469852 0.0149341 0.456191
+0.723236 0.518263 -0.0149341 0.456191
+0.687791 0.564456 -0.0447385 0.454238
+0.649401 0.608231 -0.0743513 0.450339
+0.608231 0.649401 -0.103646 0.444512
+0.564456 0.687791 -0.132496 0.436782
+0.518263 0.723236 -0.160779 0.427181
+0.469852 0.755583 -0.188374 0.415751
+0.419428 0.784695 -0.215162 0.40254
+0.367209 0.810447 -0.241029 0.387606
+0.313417 0.832728 -0.265863 0.371012
+0.258283 0.851444 -0.28956 0.352829
+0.202043 0.866513 -0.312016 0.333136
+0.144937 0.877872 -0.333136 0.312016
+0.0872113 0.885472 -0.352829 0.28956
+0.0291119 0.88928 -0.371012 0.265863
+-0.0291121 0.88928 -0.387606 0.241029
+-0.0872115 0.885472 -0.40254 0.215162
+-0.144937 0.877872 -0.415751 0.188374
+-0.202043 0.866513 -0.427181 0.160779
+-0.258283 0.851444 -0.436782 0.132496
+-0.313417 0.832728 -0.444512 0.103646
+-0.367209 0.810447 -0.450339 0.0743512
+-0.419428 0.784695 -0.454238 0.0447386
+-0.469852 0.755583 -0.456191 0.0149342
+-0.518263 0.723236 -0.456191 -0.0149342
+-0.564456 0.687791 -0.454238 -0.0447385
+-0.608231 0.649401 -0.450339 -0.0743513
+-0.649401 0.608231 -0.444512 -0.103646
+-0.687791 0.564456 -0.436782 -0.132496
+-0.723236 0.518263 -0.427181 -0.160779
+-0.755583 0.469852 -0.415751 -0.188374
+-0.784695 0.419428 -0.40254 -0.215162
+-0.810447 0.367209 -0.387606 -0.241029
+-0.832728 0.313417 -0.371012 -0.265864
+-0.851444 0.258283 -0.352829 -0.28956
+-0.866513 0.202043 -0.333136 -0.312016
+-0.877872 0.144937 -0.312016 -0.333136
+-0.885472 0.0872113 -0.28956 -0.352829
+-0.88928 0.0291121 -0.265864 -0.371012
+0.912382 0.0298683 0.180564 0.366147
+0.908475 0.089477 0.15623 0.377172
+0.900678 0.148703 0.131227 0.386583
+0.889024 0.207291 0.105662 0.394338
+0.873563 0.264992 0.0796453 0.400404
+0.854361 0.321559 0.0532871 0.404756
+0.831501 0.376748 0.0267007 0.407374
+0.80508 0.430324 -8.61828e-09 0.408248
+0.775212 0.482058 -0.0267007 0.407374
+0.742024 0.531727 -0.0532871 0.404756
+0.705659 0.579119 -0.0796453 0.400404
+0.666272 0.624032 -0.105662 0.394338
+0.624032 0.666272 -0.131227 0.386583
+0.579119 0.705659 -0.15623 0.377172
+0.531727 0.742024 -0.180564 0.366147
+0.482058 0.775212 -0.204124 0.353553
+0.430324 0.80508 -0.226811 0.339446
+0.376748 0.831501 -0.248526 0.323885
+0.321559 0.854361 -0.269177 0.306937
+0.264992 0.873563 -0.288675 0.288675
+0.207291 0.889024 -0.306937 0.269177
+0.148702 0.900678 -0.323885 0.248526
+0.0894769 0.908475 -0.339446 0.226811
+0.0298682 0.912382 -0.353553 0.204124
+-0.0298684 0.912382 -0.366147 0.180564
+-0.0894771 0.908475 -0.377172 0.15623
+-0.148703 0.900678 -0.386583 0.131227
+-0.207291 0.889024 -0.394338 0.105662
+-0.264993 0.873563 -0.400404 0.0796452
+-0.321559 0.854361 -0.404756 0.053287
+-0.376748 0.831501 -0.407374 0.0267007
+-0.430324 0.80508 -0.408248 4.65371e-08
+-0.482058 0.775212 -0.407374 -0.0267007
+-0.531727 0.742024 -0.404756 -0.0532871
+-0.579119 0.705659 -0.400404 -0.0796453
+-0.624032 0.666272 -0.394338 -0.105662
+-0.666272 0.624032 -0.386583 -0.131227
+-0.705659 0.579119 -0.377172 -0.15623
+-0.742024 0.531727 -0.366147 -0.180564
+-0.775212 0.482058 -0.353553 -0.204124
+-0.80508 0.430324 -0.339446 -0.226811
+-0.831501 0.376748 -0.323885 -0.248526
+-0.854361 0.321559 -0.306937 -0.269177
+-0.873563 0.264992 -0.288675 -0.288675
+-0.889024 0.207291 -0.269177 -0.306937
+-0.900678 0.148703 -0.248526 -0.323885
+-0.908475 0.089477 -0.226811 -0.339446
+-0.912382 0.0298684 -0.204124 -0.353553
+0.912382 0.0298683 0.248526 0.323885
+0.908475 0.089477 0.226811 0.339446
+0.900678 0.148703 0.204124 0.353553
+0.889024 0.207291 0.180564 0.366147
+0.873563 0.264992 0.15623 0.377172
+0.854361 0.321559 0.131227 0.386583
+0.831501 0.376748 0.105662 0.394338
+0.80508 0.430324 0.0796453 0.400404
+0.775212 0.482058 0.0532871 0.404756
+0.742024 0.531727 0.0267007 0.407374
+0.705659 0.579119 7.4012e-09 0.408248
+0.666272 0.624032 -0.0267007 0.407374
+0.624032 0.666272 -0.0532871 0.404756
+0.579119 0.705659 -0.0796453 0.400404
+0.531727 0.742024 -0.105662 0.394338
+0.482058 0.775212 -0.131227 0.386583
+0.430324 0.80508 -0.15623 0.377172
+0.376748 0.831501 -0.180564 0.366147
+0.321559 0.854361 -0.204124 0.353553
+0.264992 0.873563 -0.226811 0.339446
+0.207291 0.889024 -0.248526 0.323885
+0.148702 0.900678 -0.269177 0.306937
+0.0894769 0.908475 -0.288675 0.288675
+0.0298682 0.912382 -0.306937 0.269177
+-0.0298684 0.912382 -0.323885 0.248526
+-0.0894771 0.908475 -0.339446 0.226811
+-0.148703 0.900678 -0.353553 0.204124
+-0.207291 0.889024 -0.366147 0.180564
+-0.264993 0.873563 -0.377172 0.15623
+-0.321559 0.854361 -0.386583 0.131227
+-0.376748 0.831501 -0.394338 0.105662
+-0.430324 0.80508 -0.400404 0.0796453
+-0.482058 0.775212 -0.404756 0.0532871
+-0.531727 0.742024 -0.407374 0.0267007
+-0.579119 0.705659 -0.408248 1.38896e-08
+-0.624032 0.666272 -0.407374 -0.0267007
+-0.666272 0.624032 -0.404756 -0.0532871
+-0.705659 0.579119 -0.400404 -0.0796453
+-0.742024 0.531727 -0.394338 -0.105662
+-0.775212 0.482058 -0.386583 -0.131227
+-0.80508 0.430324 -0.377172 -0.15623
+-0.831501 0.376748 -0.366147 -0.180564
+-0.854361 0.321559 -0.353553 -0.204124
+-0.873563 0.264992 -0.339446 -0.226811
+-0.889024 0.207291 -0.323885 -0.248526
+-0.900678 0.148703 -0.306937 -0.269177
+-0.908475 0.089477 -0.288675 -0.288675
+-0.912382 0.0298684 -0.269177 -0.306937
+0.933521 0.0305603 0.180053 0.308521
+0.929524 0.0915501 0.159489 0.319636
+0.921546 0.152148 0.138242 0.329383
+0.909622 0.212094 0.116404 0.337719
+0.893803 0.271132 0.0940667 0.344609
+0.874156 0.329009 0.0713268 0.350024
+0.850766 0.385477 0.0482814 0.353939
+0.823733 0.440295 0.0250293 0.356339
+0.793173 0.493227 0.00166998 0.357213
+0.759216 0.544047 -0.0216965 0.356558
+0.722008 0.592537 -0.04497 0.354375
+0.681709 0.63849 -0.068051 0.350675
+0.63849 0.681709 -0.0908405 0.345474
+0.592537 0.722008 -0.113241 0.338793
+0.544047 0.759216 -0.135157 0.330661
+0.493227 0.793173 -0.156494 0.321114
+0.440295 0.823733 -0.17716 0.310191
+0.385477 0.850766 -0.197069 0.29794
+0.329009 0.874156 -0.216133 0.284413
+0.271132 0.893803 -0.234272 0.269668
+0.212094 0.909622 -0.251407 0.253769
+0.152148 0.921546 -0.267466 0.236783
+0.09155 0.929524 -0.28238 0.218783
+0.0305602 0.933521 -0.296084 0.199846
+-0.0305604 0.933521 -0.308521 0.180053
+-0.0915502 0.929524 -0.319636 0.159489
+-0.152148 0.921546 -0.329383 0.138242
+-0.212094 0.909622 -0.337719 0.116404
+-0.271132 0.893803 -0.344609 0.0940666
+-0.329009 0.874156 -0.350024 0.0713267
+-0.385477 0.850766 -0.353939 0.0482813
+-0.440295 0.823733 -0.356339 0.0250293
+-0.493227 0.793173 -0.357213 0.00166998
+-0.544047 0.759216 -0.356558 -0.0216965
+-0.592537 0.722008 -0.354375 -0.04497
+-0.63849 0.681709 -0.350675 -0.068051
+-0.681709 0.63849 -0.345474 -0.0908405
+-0.722008 0.592537 -0.338793 -0.113241
+-0.759216 0.544047 -0.330661 -0.135157
+-0.793173 0.493227 -0.321114 -0.156494
+-0.823733 0.440295 -0.310191 -0.17716
+-0.850766 0.385477 -0.29794 -0.197069
+-0.874156 0.329009 -0.284413 -0.216133
+-0.893803 0.271132 -0.269668 -0.234272
+-0.909622 0.212094 -0.253769 -0.251407
+-0.921546 0.152148 -0.236783 -0.267466
+-0.929524 0.0915501 -0.218783 -0.28238
+-0.933521 0.0305604 -0.199846 -0.296084
+0.933521 0.0305603 0.106886 0.340851
+0.929524 0.0915501 0.0843647 0.347112
+0.921546 0.152148 0.0614818 0.351887
+0.909622 0.212094 0.0383357 0.355154
+0.893803 0.271132 0.0150254 0.356901
+0.874156 0.329009 -0.00834917 0.35712
+0.850766 0.385477 -0.031688 0.355809
+0.823733 0.440295 -0.0548912 0.352975
+0.793173 0.493227 -0.0778593 0.348629
+0.759216 0.544047 -0.100494 0.34279
+0.722008 0.592537 -0.122698 0.335484
+0.681709 0.63849 -0.144377 0.32674
+0.63849 0.681709 -0.165438 0.316598
+0.592537 0.722008 -0.18579 0.3051
+0.544047 0.759216 -0.205347 0.292296
+0.493227 0.793173 -0.224025 0.278239
+0.440295 0.823733 -0.241743 0.262992
+0.385477 0.850766 -0.258426 0.246618
+0.329009 0.874156 -0.274002 0.229188
+0.271132 0.893803 -0.288405 0.210777
+0.212094 0.909622 -0.301573 0.191463
+0.152148 0.921546 -0.313449 0.171329
+0.09155 0.929524 -0.323984 0.150462
+0.0305602 0.933521 -0.333131 0.12895
+-0.0305604 0.933521 -0.340851 0.106886
+-0.0915502 0.929524 -0.347112 0.0843647
+-0.152148 0.921546 -0.351887 0.0614818
+-0.212094 0.909622 -0.355154 0.0383357
+-0.271132 0.893803 -0.356901 0.0150254
+-0.329009 0.874156 -0.35712 -0.00834923
+-0.385477 0.850766 -0.355809 -0.0316881
+-0.440295 0.823733 -0.352975 -0.0548912
+-0.493227 0.793173 -0.348629 -0.0778593
+-0.544047 0.759216 -0.34279 -0.100494
+-0.592537 0.722008 -0.335484 -0.122698
+-0.63849 0.681709 -0.32674 -0.144377
+-0.681709 0.63849 -0.316598 -0.165438
+-0.722008 0.592537 -0.3051 -0.18579
+-0.759216 0.544047 -0.292296 -0.205347
+-0.793173 0.493227 -0.278239 -0.224025
+-0.823733 0.440295 -0.262992 -0.241743
+-0.850766 0.385477 -0.246618 -0.258425
+-0.874156 0.329009 -0.229188 -0.274002
+-0.893803 0.271132 -0.210777 -0.288405
+-0.909622 0.212094 -0.191463 -0.301573
+-0.921546 0.152148 -0.171329 -0.313449
+-0.929524 0.0915501 -0.150462 -0.323984
+-0.933521 0.0305604 -0.12895 -0.333131
+0.951462 0.0311476 0.0300115 0.304712
+0.947388 0.0933095 0.0100181 0.306022
+0.939256 0.155072 -0.0100181 0.306022
+0.927103 0.21617 -0.0300115 0.304712
+0.91098 0.276343 -0.0498763 0.302097
+0.890956 0.335332 -0.0695276 0.298188
+0.867117 0.392885 -0.0888812 0.293002
+0.839564 0.448756 -0.107854 0.286561
+0.808416 0.502706 -0.126365 0.278894
+0.773807 0.554502 -0.144335 0.270032
+0.735884 0.603924 -0.161687 0.260014
+0.69481 0.650761 -0.178347 0.248882
+0.65076 0.69481 -0.194242 0.236685
+0.603924 0.735884 -0.209307 0.223474
+0.554502 0.773807 -0.223474 0.209307
+0.502706 0.808416 -0.236685 0.194242
+0.448756 0.839564 -0.248882 0.178347
+0.392885 0.867117 -0.260014 0.161687
+0.335332 0.890956 -0.270032 0.144335
+0.276343 0.91098 -0.278894 0.126365
+0.21617 0.927103 -0.286561 0.107854
+0.155072 0.939256 -0.293002 0.0888811
+0.0933094 0.947388 -0.298188 0.0695276
+0.0311475 0.951462 -0.302097 0.0498763
+-0.0311477 0.951462 -0.304712 0.0300115
+-0.0933096 0.947388 -0.306022 0.0100181
+-0.155072 0.939256 -0.306022 -0.0100182
+-0.21617 0.927103 -0.304712 -0.0300115
+-0.276343 0.91098 -0.302097 -0.0498764
+-0.335332 0.890956 -0.298188 -0.0695277
+-0.392886 0.867116 -0.293002 -0.0888812
+-0.448756 0.839564 -0.286562 -0.107854
+-0.502706 0.808416 -0.278894 -0.126365
+-0.554502 0.773807 -0.270032 -0.144335
+-0.603924 0.735884 -0.260014 -0.161687
+-0.650761 0.69481 -0.248882 -0.178347
+-0.69481 0.650761 -0.236685 -0.194242
+-0.735884 0.603924 -0.223474 -0.209307
+-0.773807 0.554502 -0.209307 -0.223474
+-0.808416 0.502706 -0.194242 -0.236685
+-0.839564 0.448756 -0.178347 -0.248882
+-0.867117 0.392885 -0.161687 -0.260014
+-0.890956 0.335332 -0.144335 -0.270032
+-0.91098 0.276343 -0.126365 -0.278894
+-0.927103 0.21617 -0.107854 -0.286562
+-0.939256 0.155072 -0.0888812 -0.293002
+-0.947388 0.0933095 -0.0695276 -0.298188
+-0.951462 0.0311477 -0.0498764 -0.302097
+0.951462 0.0311476 0.107854 0.286561
+0.947388 0.0933095 0.0888812 0.293002
+0.939256 0.155072 0.0695276 0.298188
+0.927103 0.21617 0.0498763 0.302097
+0.91098 0.276343 0.0300115 0.304712
+0.890956 0.335332 0.0100181 0.306022
+0.867117 0.392885 -0.0100181 0.306022
+0.839564 0.448756 -0.0300115 0.304712
+0.808416 0.502706 -0.0498764 0.302097
+0.773807 0.554502 -0.0695276 0.298188
+0.735884 0.603924 -0.0888812 0.293002
+0.69481 0.650761 -0.107854 0.286561
+0.65076 0.69481 -0.126365 0.278894
+0.603924 0.735884 -0.144335 0.270032
+0.554502 0.773807 -0.161687 0.260014
+0.502706 0.808416 -0.178347 0.248882
+0.448756 0.839564 -0.194242 0.236685
+0.392885 0.867117 -0.209307 0.223474
+0.335332 0.890956 -0.223474 0.209307
+0.276343 0.91098 -0.236685 0.194242
+0.21617 0.927103 -0.248882 0.178347
+0.155072 0.939256 -0.260014 0.161687
+0.0933094 0.947388 -0.270032 0.144335
+0.0311475 0.951462 -0.278894 0.126365
+-0.0311477 0.951462 -0.286562 0.107854
+-0.0933096 0.947388 -0.293002 0.0888811
+-0.155072 0.939256 -0.298188 0.0695276
+-0.21617 0.927103 -0.302097 0.0498763
+-0.276343 0.91098 -0.304712 0.0300114
+-0.335332 0.890956 -0.306022 0.0100181
+-0.392886 0.867116 -0.306022 -0.0100182
+-0.448756 0.839564 -0.304712 -0.0300115
+-0.502706 0.808416 -0.302097 -0.0498763
+-0.554502 0.773807 -0.298188 -0.0695277
+-0.603924 0.735884 -0.293002 -0.0888812
+-0.650761 0.69481 -0.286561 -0.107854
+-0.69481 0.650761 -0.278894 -0.126365
+-0.735884 0.603924 -0.270032 -0.144335
+-0.773807 0.554502 -0.260014 -0.161687
+-0.808416 0.502706 -0.248882 -0.178347
+-0.839564 0.448756 -0.236685 -0.194242
+-0.867117 0.392885 -0.223474 -0.209307
+-0.890956 0.335332 -0.209307 -0.223474
+-0.91098 0.276343 -0.194242 -0.236685
+-0.927103 0.21617 -0.178347 -0.248882
+-0.939256 0.155072 -0.161687 -0.260014
+-0.947388 0.0933095 -0.144335 -0.270032
+-0.951462 0.0311477 -0.126365 -0.278894
+0.966382 0.0316361 0.031648 0.253185
+0.962244 0.0947728 0.0150212 0.254713
+0.953986 0.157504 -0.00166997 0.25515
+0.941642 0.21956 -0.018354 0.254494
+0.925266 0.280676 -0.0349594 0.252749
+0.904928 0.340591 -0.0514151 0.249921
+0.880714 0.399046 -0.0676507 0.246023
+0.85273 0.455794 -0.0835965 0.241072
+0.821094 0.510589 -0.0991844 0.235089
+0.785942 0.563198 -0.114348 0.228098
+0.747424 0.613395 -0.129021 0.220131
+0.705706 0.660965 -0.143142 0.211221
+0.660965 0.705706 -0.15665 0.201407
+0.613395 0.747424 -0.169487 0.190731
+0.563198 0.785942 -0.181599 0.179237
+0.510589 0.821094 -0.192933 0.166976
+0.455793 0.85273 -0.203441 0.154
+0.399046 0.880714 -0.213077 0.140365
+0.340591 0.904928 -0.221801 0.126129
+0.280676 0.925266 -0.229575 0.111352
+0.21956 0.941642 -0.236367 0.0960987
+0.157504 0.953986 -0.242146 0.0804338
+0.0947727 0.962244 -0.246888 0.0644245
+0.031636 0.966382 -0.250573 0.0481393
+-0.0316362 0.966382 -0.253185 0.031648
+-0.0947729 0.962244 -0.254713 0.0150212
+-0.157504 0.953986 -0.25515 -0.00166999
+-0.21956 0.941642 -0.254494 -0.018354
+-0.280676 0.925266 -0.252749 -0.0349595
+-0.340591 0.904927 -0.249921 -0.0514152
+-0.399047 0.880714 -0.246023 -0.0676507
+-0.455793 0.85273 -0.241072 -0.0835965
+-0.510589 0.821094 -0.235089 -0.0991844
+-0.563198 0.785941 -0.228098 -0.114348
+-0.613395 0.747424 -0.220131 -0.129021
+-0.660966 0.705706 -0.211221 -0.143142
+-0.705706 0.660966 -0.201407 -0.15665
+-0.747424 0.613395 -0.190731 -0.169487
+-0.785942 0.563198 -0.179237 -0.181599
+-0.821094 0.510589 -0.166976 -0.192933
+-0.85273 0.455794 -0.154 -0.20344
+-0.880714 0.399046 -0.140365 -0.213077
+-0.904928 0.340591 -0.126129 -0.221801
+-0.925266 0.280676 -0.111352 -0.229575
+-0.941642 0.21956 -0.0960987 -0.236367
+-0.953986 0.157504 -0.0804339 -0.242146
+-0.962244 0.0947727 -0.0644245 -0.246888
+-0.966382 0.0316362 -0.0481394 -0.250573
+0.841175 0.0275372 0.491923 0.222887
+0.837573 0.0824937 0.476292 0.254583
+0.830384 0.137097 0.458622 0.285189
+0.81964 0.191113 0.438987 0.314574
+0.805385 0.244311 0.417473 0.342612
+0.787682 0.296463 0.394172 0.369182
+0.766606 0.347345 0.369182 0.394172
+0.742247 0.396739 0.342612 0.417473
+0.71471 0.444435 0.314574 0.438987
+0.684112 0.490228 0.285189 0.458622
+0.650585 0.533921 0.254583 0.476292
+0.614272 0.575329 0.222887 0.491923
+0.575329 0.614272 0.190237 0.505447
+0.533922 0.650585 0.156772 0.516807
+0.490228 0.684112 0.122635 0.525954
+0.444435 0.71471 0.0879736 0.532848
+0.396739 0.742247 0.0529353 0.537461
+0.347345 0.766606 0.0176703 0.539773
+0.296463 0.787682 -0.0176704 0.539773
+0.244311 0.805385 -0.0529353 0.537461
+0.191113 0.81964 -0.0879736 0.532848
+0.137097 0.830384 -0.122635 0.525954
+0.0824936 0.837573 -0.156772 0.516807
+0.0275371 0.841175 -0.190237 0.505447
+-0.0275373 0.841175 -0.222887 0.491923
+-0.0824938 0.837573 -0.254583 0.476292
+-0.137097 0.830384 -0.285189 0.458622
+-0.191113 0.81964 -0.314574 0.438987
+-0.244311 0.805385 -0.342612 0.417473
+-0.296463 0.787682 -0.369182 0.394172
+-0.347345 0.766606 -0.394172 0.369182
+-0.396739 0.742247 -0.417473 0.342612
+-0.444435 0.71471 -0.438987 0.314574
+-0.490228 0.684112 -0.458622 0.285189
+-0.533921 0.650585 -0.476292 0.254583
+-0.575329 0.614272 -0.491923 0.222887
+-0.614272 0.575329 -0.505447 0.190237
+-0.650585 0.533921 -0.516807 0.156772
+-0.684112 0.490228 -0.525954 0.122635
+-0.71471 0.444435 -0.532848 0.0879736
+-0.742247 0.39674 -0.537461 0.0529354
+-0.766606 0.347345 -0.539773 0.0176703
+-0.787682 0.296463 -0.539773 -0.0176704
+-0.805385 0.244311 -0.537461 -0.0529353
+-0.81964 0.191113 -0.532848 -0.0879736
+-0.830384 0.137097 -0.525954 -0.122635
+-0.837573 0.0824937 -0.516807 -0.156772
+-0.841175 0.0275373 -0.505447 -0.190237
+0.865562 0.0283356 0.433013 0.25
+0.861855 0.0848853 0.415735 0.277785
+0.854458 0.141072 0.396677 0.304381
+0.843402 0.196654 0.37592 0.329673
+0.828735 0.251394 0.353553 0.353553
+0.810518 0.305057 0.329673 0.37592
+0.788831 0.357415 0.304381 0.396677
+0.763766 0.408242 0.277785 0.415735
+0.735431 0.45732 0.25 0.433013
+0.703946 0.50444 0.221144 0.448436
+0.669447 0.549401 0.191342 0.46194
+0.632081 0.592008 0.16072 0.473465
+0.592008 0.632081 0.12941 0.482963
+0.549401 0.669447 0.0975452 0.490393
+0.50444 0.703946 0.0652631 0.495722
+0.45732 0.735431 0.0327015 0.498929
+0.408241 0.763766 -4.11028e-08 0.5
+0.357415 0.788831 -0.0327016 0.498929
+0.305057 0.810518 -0.0652631 0.495722
+0.251394 0.828735 -0.0975452 0.490393
+0.196654 0.843402 -0.12941 0.482963
+0.141072 0.854458 -0.16072 0.473465
+0.0848852 0.861855 -0.191342 0.46194
+0.0283355 0.865562 -0.221144 0.448436
+-0.0283356 0.865562 -0.25 0.433013
+-0.0848854 0.861855 -0.277785 0.415735
+-0.141072 0.854458 -0.304381 0.396677
+-0.196654 0.843402 -0.329673 0.37592
+-0.251394 0.828735 -0.353553 0.353553
+-0.305058 0.810518 -0.37592 0.329673
+-0.357415 0.788831 -0.396677 0.304381
+-0.408241 0.763766 -0.415735 0.277785
+-0.45732 0.735431 -0.433013 0.25
+-0.504441 0.703946 -0.448436 0.221144
+-0.549401 0.669447 -0.46194 0.191342
+-0.592008 0.632081 -0.473465 0.16072
+-0.632081 0.592008 -0.482963 0.12941
+-0.669447 0.549401 -0.490393 0.0975451
+-0.703946 0.504441 -0.495722 0.0652631
+-0.735431 0.45732 -0.498929 0.0327016
+-0.763766 0.408242 -0.5 5.62508e-08
+-0.788831 0.357415 -0.498929 -0.0327016
+-0.810518 0.305057 -0.495722 -0.0652631
+-0.828735 0.251394 -0.490393 -0.0975451
+-0.843402 0.196654 -0.482963 -0.12941
+-0.854458 0.141072 -0.473465 -0.16072
+-0.861855 0.0848853 -0.46194 -0.191342
+-0.865562 0.0283356 -0.448436 -0.221144
+0.865562 0.0283356 0.473465 0.16072
+0.861855 0.0848853 0.46194 0.191342
+0.854458 0.141072 0.448436 0.221144
+0.843402 0.196654 0.433013 0.25
+0.828735 0.251394 0.415735 0.277785
+0.810518 0.305057 0.396677 0.304381
+0.788831 0.357415 0.37592 0.329673
+0.763766 0.408242 0.353553 0.353553
+0.735431 0.45732 0.329673 0.37592
+0.703946 0.50444 0.304381 0.396677
+0.669447 0.549401 0.277785 0.415735
+0.632081 0.592008 0.25 0.433013
+0.592008 0.632081 0.221144 0.448436
+0.549401 0.669447 0.191342 0.46194
+0.50444 0.703946 0.16072 0.473465
+0.45732 0.735431 0.129409 0.482963
+0.408241 0.763766 0.0975451 0.490393
+0.357415 0.788831 0.0652631 0.495722
+0.305057 0.810518 0.0327015 0.498929
+0.251394 0.828735 -2.1483e-08 0.5
+0.196654 0.843402 -0.0327016 0.498929
+0.141072 0.854458 -0.0652632 0.495722
+0.0848852 0.861855 -0.0975452 0.490393
+0.0283355 0.865562 -0.12941 0.482963
+-0.0283356 0.865562 -0.16072 0.473465
+-0.0848854 0.861855 -0.191342 0.46194
+-0.141072 0.854458 -0.221144 0.448436
+-0.196654 0.843402 -0.25 0.433013
+-0.251394 0.828735 -0.277785 0.415735
+-0.305058 0.810518 -0.304381 0.396677
+-0.357415 0.788831 -0.329673 0.37592
+-0.408241 0.763766 -0.353553 0.353553
+-0.45732 0.735431 -0.37592 0.329673
+-0.504441 0.703946 -0.396677 0.304381
+-0.549401 0.669447 -0.415735 0.277785
+-0.592008 0.632081 -0.433013 0.25
+-0.632081 0.592008 -0.448436 0.221144
+-0.669447 0.549401 -0.46194 0.191342
+-0.703946 0.504441 -0.473465 0.16072
+-0.735431 0.45732 -0.482963 0.12941
+-0.763766 0.408242 -0.490393 0.0975452
+-0.788831 0.357415 -0.495722 0.0652631
+-0.810518 0.305057 -0.498929 0.0327015
+-0.828735 0.251394 -0.5 1.62659e-08
+-0.843402 0.196654 -0.498929 -0.0327016
+-0.854458 0.141072 -0.495722 -0.0652631
+-0.861855 0.0848853 -0.490393 -0.0975452
+-0.865562 0.0283356 -0.482963 -0.129409
+0.88928 0.029112 0.415751 0.188374
+0.885472 0.0872114 0.40254 0.215162
+0.877872 0.144937 0.387606 0.241029
+0.866513 0.202043 0.371012 0.265863
+0.851444 0.258283 0.352829 0.28956
+0.832728 0.313417 0.333136 0.312016
+0.810447 0.367209 0.312016 0.333136
+0.784695 0.419428 0.28956 0.352829
+0.755583 0.469852 0.265863 0.371012
+0.723236 0.518263 0.241029 0.387606
+0.687791 0.564456 0.215162 0.40254
+0.649401 0.608231 0.188374 0.415751
+0.608231 0.649401 0.160779 0.427181
+0.564456 0.687791 0.132496 0.436782
+0.518263 0.723236 0.103646 0.444512
+0.469852 0.755583 0.0743512 0.450339
+0.419428 0.784695 0.0447385 0.454238
+0.367209 0.810447 0.0149341 0.456191
+0.313417 0.832728 -0.0149342 0.456191
+0.258283 0.851444 -0.0447385 0.454238
+0.202043 0.866513 -0.0743513 0.450339
+0.144937 0.877872 -0.103646 0.444512
+0.0872113 0.885472 -0.132496 0.436781
+0.0291119 0.88928 -0.160779 0.427181
+-0.0291121 0.88928 -0.188374 0.415751
+-0.0872115 0.885472 -0.215162 0.40254
+-0.144937 0.877872 -0.241029 0.387606
+-0.202043 0.866513 -0.265863 0.371012
+-0.258283 0.851444 -0.28956 0.352829
+-0.313417 0.832728 -0.312016 0.333136
+-0.367209 0.810447 -0.333136 0.312016
+-0.419428 0.784695 -0.352829 0.28956
+-0.469852 0.755583 -0.371012 0.265863
+-0.518263 0.723236 -0.387606 0.241029
+-0.564456 0.687791 -0.40254 0.215162
+-0.608231 0.649401 -0.415751 0.188374
+-0.649401 0.608231 -0.427181 0.160779
+-0.687791 0.564456 -0.436782 0.132496
+-0.723236 0.518263 -0.444512 0.103646
+-0.755583 0.469852 -0.450339 0.0743513
+-0.784695 0.419428 -0.454238 0.0447386
+-0.810447 0.367209 -0.456191 0.0149342
+-0.832728 0.313417 -0.456191 -0.0149342
+-0.851444 0.258283 -0.454238 -0.0447385
+-0.866513 0.202043 -0.450339 -0.0743513
+-0.877872 0.144937 -0.444512 -0.103646
+-0.885472 0.0872113 -0.436782 -0.132496
+-0.88928 0.0291121 -0.427181 -0.160779
+0.88928 0.029112 0.371012 0.265863
+0.885472 0.0872114 0.352829 0.28956
+0.877872 0.144937 0.333136 0.312016
+0.866513 0.202043 0.312016 0.333136
+0.851444 0.258283 0.28956 0.352829
+0.832728 0.313417 0.265863 0.371012
+0.810447 0.367209 0.241029 0.387606
+0.784695 0.419428 0.215162 0.40254
+0.755583 0.469852 0.188374 0.415751
+0.723236 0.518263 0.160779 0.427181
+0.687791 0.564456 0.132496 0.436782
+0.649401 0.608231 0.103646 0.444512
+0.608231 0.649401 0.0743513 0.450339
+0.564456 0.687791 0.0447385 0.454238
+0.518263 0.723236 0.0149341 0.456191
+0.469852 0.755583 -0.0149342 0.456191
+0.419428 0.784695 -0.0447385 0.454238
+0.367209 0.810447 -0.0743513 0.450339
+0.313417 0.832728 -0.103646 0.444512
+0.258283 0.851444 -0.132496 0.436782
+0.202043 0.866513 -0.160779 0.427181
+0.144937 0.877872 -0.188374 0.415751
+0.0872113 0.885472 -0.215162 0.40254
+0.0291119 0.88928 -0.241029 0.387606
+-0.0291121 0.88928 -0.265864 0.371012
+-0.0872115 0.885472 -0.28956 0.352829
+-0.144937 0.877872 -0.312016 0.333136
+-0.202043 0.866513 -0.333136 0.312016
+-0.258283 0.851444 -0.352829 0.28956
+-0.313417 0.832728 -0.371012 0.265863
+-0.367209 0.810447 -0.387606 0.241029
+-0.419428 0.784695 -0.40254 0.215162
+-0.469852 0.755583 -0.415751 0.188374
+-0.518263 0.723236 -0.427181 0.160779
+-0.564456 0.687791 -0.436782 0.132496
+-0.608231 0.649401 -0.444512 0.103646
+-0.649401 0.608231 -0.450339 0.0743513
+-0.687791 0.564456 -0.454238 0.0447385
+-0.723236 0.518263 -0.456191 0.0149342
+-0.755583 0.469852 -0.456191 -0.0149342
+-0.784695 0.419428 -0.454238 -0.0447384
+-0.810447 0.367209 -0.450339 -0.0743513
+-0.832728 0.313417 -0.444512 -0.103646
+-0.851444 0.258283 -0.436782 -0.132496
+-0.866513 0.202043 -0.427181 -0.160779
+-0.877872 0.144937 -0.415751 -0.188374
+-0.885472 0.0872113 -0.40254 -0.215162
+-0.88928 0.0291121 -0.387606 -0.241029
+0.912382 0.0298683 0.306937 0.269177
+0.908475 0.089477 0.288675 0.288675
+0.900678 0.148703 0.269177 0.306937
+0.889024 0.207291 0.248526 0.323885
+0.873563 0.264992 0.226811 0.339446
+0.854361 0.321559 0.204124 0.353553
+0.831501 0.376748 0.180564 0.366147
+0.80508 0.430324 0.15623 0.377172
+0.775212 0.482058 0.131227 0.386583
+0.742024 0.531727 0.105662 0.394338
+0.705659 0.579119 0.0796453 0.400404
+0.666272 0.624032 0.0532871 0.404756
+0.624032 0.666272 0.0267007 0.407374
+0.579119 0.705659 -9.12808e-10 0.408248
+0.531727 0.742024 -0.0267007 0.407374
+0.482058 0.775212 -0.0532871 0.404756
+0.430324 0.80508 -0.0796453 0.400404
+0.376748 0.831501 -0.105662 0.394338
+0.321559 0.854361 -0.131227 0.386583
+0.264992 0.873563 -0.15623 0.377172
+0.207291 0.889024 -0.180564 0.366147
+0.148702 0.900678 -0.204124 0.353553
+0.0894769 0.908475 -0.226811 0.339446
+0.0298682 0.912382 -0.248526 0.323885
+-0.0298684 0.912382 -0.269177 0.306937
+-0.0894771 0.908475 -0.288675 0.288675
+-0.148703 0.900678 -0.306937 0.269177
+-0.207291 0.889024 -0.323885 0.248526
+-0.264993 0.873563 -0.339446 0.226811
+-0.321559 0.854361 -0.353553 0.204124
+-0.376748 0.831501 -0.366147 0.180564
+-0.430324 0.80508 -0.377172 0.15623
+-0.482058 0.775212 -0.386583 0.131227
+-0.531727 0.742024 -0.394338 0.105662
+-0.579119 0.705659 -0.400404 0.0796453
+-0.624032 0.666272 -0.404756 0.0532871
+-0.666272 0.624032 -0.407374 0.0267007
+-0.705659 0.579119 -0.408248 -1.87579e-08
+-0.742024 0.531727 -0.407374 -0.0267007
+-0.775212 0.482058 -0.404756 -0.0532871
+-0.80508 0.430324 -0.400404 -0.0796452
+-0.831501 0.376748 -0.394338 -0.105662
+-0.854361 0.321559 -0.386583 -0.131227
+-0.873563 0.264992 -0.377172 -0.15623
+-0.889024 0.207291 -0.366147 -0.180564
+-0.900678 0.148703 -0.353553 -0.204124
+-0.908475 0.089477 -0.339446 -0.226811
+-0.912382 0.0298684 -0.323885 -0.248526
+0.912382 0.0298683 0.353553 0.204124
+0.908475 0.089477 0.339446 0.226811
+0.900678 0.148703 0.323885 0.248526
+0.889024 0.207291 0.306937 0.269177
+0.873563 0.264992 0.288675 0.288675
+0.854361 0.321559 0.269177 0.306937
+0.831501 0.376748 0.248526 0.323885
+0.80508 0.430324 0.226811 0.339446
+0.775212 0.482058 0.204124 0.353553
+0.742024 0.531727 0.180564 0.366147
+0.705659 0.579119 0.15623 0.377172
+0.666272 0.624032 0.131227 0.386583
+0.624032 0.666272 0.105662 0.394338
+0.579119 0.705659 0.0796453 0.400404
+0.531727 0.742024 0.0532871 0.404756
+0.482058 0.775212 0.0267007 0.407374
+0.430324 0.80508 -3.35603e-08 0.408248
+0.376748 0.831501 -0.0267007 0.407374
+0.321559 0.854361 -0.0532871 0.404756
+0.264992 0.873563 -0.0796453 0.400404
+0.207291 0.889024 -0.105662 0.394338
+0.148702 0.900678 -0.131227 0.386583
+0.0894769 0.908475 -0.15623 0.377172
+0.0298682 0.912382 -0.180564 0.366147
+-0.0298684 0.912382 -0.204124 0.353553
+-0.0894771 0.908475 -0.226811 0.339446
+-0.148703 0.900678 -0.248526 0.323885
+-0.207291 0.889024 -0.269177 0.306937
+-0.264993 0.873563 -0.288675 0.288675
+-0.321559 0.854361 -0.306937 0.269177
+-0.376748 0.831501 -0.323885 0.248526
+-0.430324 0.80508 -0.339446 0.226811
+-0.482058 0.775212 -0.353553 0.204124
+-0.531727 0.742024 -0.366147 0.180564
+-0.579119 0.705659 -0.377172 0.15623
+-0.624032 0.666272 -0.386583 0.131227
+-0.666272 0.624032 -0.394338 0.105662
+-0.705659 0.579119 -0.400404 0.0796453
+-0.742024 0.531727 -0.404756 0.0532871
+-0.775212 0.482058 -0.407374 0.0267007
+-0.80508 0.430324 -0.408248 4.59286e-08
+-0.831501 0.376748 -0.407374 -0.0267007
+-0.854361 0.321559 -0.404756 -0.0532871
+-0.873563 0.264992 -0.400404 -0.0796453
+-0.889024 0.207291 -0.394338 -0.105662
+-0.900678 0.148703 -0.386583 -0.131227
+-0.908475 0.089477 -0.377172 -0.15623
+-0.912382 0.0298684 -0.366147 -0.180564
+0.933521 0.0305603 0.296084 0.199846
+0.929524 0.0915501 0.28238 0.218783
+0.921546 0.152148 0.267466 0.236783
+0.909622 0.212094 0.251407 0.253769
+0.893803 0.271132 0.234272 0.269668
+0.874156 0.329009 0.216133 0.284413
+0.850766 0.385477 0.197069 0.29794
+0.823733 0.440295 0.17716 0.310191
+0.793173 0.493227 0.156494 0.321114
+0.759216 0.544047 0.135157 0.330661
+0.722008 0.592537 0.113241 0.338793
+0.681709 0.63849 0.0908405 0.345474
+0.63849 0.681709 0.068051 0.350675
+0.592537 0.722008 0.04497 0.354375
+0.544047 0.759216 0.0216964 0.356558
+0.493227 0.793173 -0.00167001 0.357213
+0.440295 0.823733 -0.0250293 0.356339
+0.385477 0.850766 -0.0482814 0.353939
+0.329009 0.874156 -0.0713268 0.350024
+0.271132 0.893803 -0.0940667 0.344609
+0.212094 0.909622 -0.116404 0.337719
+0.152148 0.921546 -0.138243 0.329383
+0.09155 0.929524 -0.159489 0.319636
+0.0305602 0.933521 -0.180053 0.308521
+-0.0305604 0.933521 -0.199846 0.296084
+-0.0915502 0.929524 -0.218783 0.28238
+-0.152148 0.921546 -0.236783 0.267466
+-0.212094 0.909622 -0.253769 0.251407
+-0.271132 0.893803 -0.269668 0.234272
+-0.329009 0.874156 -0.284413 0.216133
+-0.385477 0.850766 -0.29794 0.197069
+-0.440295 0.823733 -0.310191 0.17716
+-0.493227 0.793173 -0.321114 0.156494
+-0.544047 0.759216 -0.330661 0.135157
+-0.592537 0.722008 -0.338793 0.113241
+-0.63849 0.681709 -0.345474 0.0908405
+-0.681709 0.63849 -0.350675 0.068051
+-0.722008 0.592537 -0.354375 0.04497
+-0.759216 0.544047 -0.356558 0.0216965
+-0.793173 0.493227 -0.357213 -0.00166999
+-0.823733 0.440295 -0.356339 -0.0250292
+-0.850766 0.385477 -0.353939 -0.0482814
+-0.874156 0.329009 -0.350024 -0.0713268
+-0.893803 0.271132 -0.344609 -0.0940667
+-0.909622 0.212094 -0.337719 -0.116404
+-0.921546 0.152148 -0.329383 -0.138242
+-0.929524 0.0915501 -0.319636 -0.159489
+-0.933521 0.0305604 -0.308521 -0.180053
+0.88928 0.029112 0.444512 0.103646
+0.885472 0.0872114 0.436782 0.132496
+0.877872 0.144937 0.427181 0.160779
+0.866513 0.202043 0.415751 0.188374
+0.851444 0.258283 0.40254 0.215162
+0.832728 0.313417 0.387606 0.241029
+0.810447 0.367209 0.371012 0.265863
+0.784695 0.419428 0.352829 0.28956
+0.755583 0.469852 0.333136 0.312016
+0.723236 0.518263 0.312016 0.333136
+0.687791 0.564456 0.28956 0.352829
+0.649401 0.608231 0.265863 0.371012
+0.608231 0.649401 0.241029 0.387606
+0.564456 0.687791 0.215162 0.40254
+0.518263 0.723236 0.188374 0.415751
+0.469852 0.755583 0.160779 0.427181
+0.419428 0.784695 0.132496 0.436782
+0.367209 0.810447 0.103646 0.444512
+0.313417 0.832728 0.0743512 0.450339
+0.258283 0.851444 0.0447385 0.454238
+0.202043 0.866513 0.0149341 0.456191
+0.144937 0.877872 -0.0149342 0.456191
+0.0872113 0.885472 -0.0447386 0.454238
+0.0291119 0.88928 -0.0743513 0.450339
+-0.0291121 0.88928 -0.103646 0.444512
+-0.0872115 0.885472 -0.132496 0.436781
+-0.144937 0.877872 -0.160779 0.427181
+-0.202043 0.866513 -0.188374 0.415751
+-0.258283 0.851444 -0.215162 0.40254
+-0.313417 0.832728 -0.241029 0.387606
+-0.367209 0.810447 -0.265864 0.371012
+-0.419428 0.784695 -0.28956 0.352829
+-0.469852 0.755583 -0.312016 0.333136
+-0.518263 0.723236 -0.333136 0.312016
+-0.564456 0.687791 -0.352829 0.28956
+-0.608231 0.649401 -0.371012 0.265863
+-0.649401 0.608231 -0.387606 0.241029
+-0.687791 0.564456 -0.40254 0.215162
+-0.723236 0.518263 -0.415751 0.188374
+-0.755583 0.469852 -0.427181 0.160779
+-0.784695 0.419428 -0.436781 0.132496
+-0.810447 0.367209 -0.444512 0.103646
+-0.832728 0.313417 -0.450339 0.0743512
+-0.851444 0.258283 -0.454238 0.0447385
+-0.866513 0.202043 -0.456191 0.0149341
+-0.877872 0.144937 -0.456191 -0.0149341
+-0.885472 0.0872113 -0.454238 -0.0447385
+-0.88928 0.0291121 -0.450339 -0.0743512
+0.912382 0.0298683 0.386583 0.131227
+0.908475 0.089477 0.377172 0.15623
+0.900678 0.148703 0.366147 0.180564
+0.889024 0.207291 0.353553 0.204124
+0.873563 0.264992 0.339446 0.226811
+0.854361 0.321559 0.323885 0.248526
+0.831501 0.376748 0.306937 0.269177
+0.80508 0.430324 0.288675 0.288675
+0.775212 0.482058 0.269177 0.306937
+0.742024 0.531727 0.248526 0.323885
+0.705659 0.579119 0.226811 0.339446
+0.666272 0.624032 0.204124 0.353553
+0.624032 0.666272 0.180564 0.366147
+0.579119 0.705659 0.15623 0.377172
+0.531727 0.742024 0.131227 0.386583
+0.482058 0.775212 0.105662 0.394338
+0.430324 0.80508 0.0796453 0.400404
+0.376748 0.831501 0.0532871 0.404756
+0.321559 0.854361 0.0267007 0.407374
+0.264992 0.873563 -1.75408e-08 0.408248
+0.207291 0.889024 -0.0267007 0.407374
+0.148702 0.900678 -0.0532871 0.404756
+0.0894769 0.908475 -0.0796453 0.400404
+0.0298682 0.912382 -0.105662 0.394338
+-0.0298684 0.912382 -0.131227 0.386583
+-0.0894771 0.908475 -0.15623 0.377172
+-0.148703 0.900678 -0.180564 0.366147
+-0.207291 0.889024 -0.204124 0.353553
+-0.264993 0.873563 -0.226811 0.339446
+-0.321559 0.854361 -0.248526 0.323885
+-0.376748 0.831501 -0.269177 0.306937
+-0.430324 0.80508 -0.288675 0.288675
+-0.482058 0.775212 -0.306937 0.269177
+-0.531727 0.742024 -0.323885 0.248526
+-0.579119 0.705659 -0.339446 0.226811
+-0.624032 0.666272 -0.353553 0.204124
+-0.666272 0.624032 -0.366147 0.180564
+-0.705659 0.579119 -0.377172 0.15623
+-0.742024 0.531727 -0.386583 0.131227
+-0.775212 0.482058 -0.394338 0.105662
+-0.80508 0.430324 -0.400404 0.0796453
+-0.831501 0.376748 -0.404756 0.0532871
+-0.854361 0.321559 -0.407374 0.0267007
+-0.873563 0.264992 -0.408248 1.32811e-08
+-0.889024 0.207291 -0.407374 -0.0267007
+-0.900678 0.148703 -0.404756 -0.0532871
+-0.908475 0.089477 -0.400404 -0.0796453
+-0.912382 0.0298684 -0.394338 -0.105662
+0.912382 0.0298683 0.404756 0.0532871
+0.908475 0.089477 0.400404 0.0796453
+0.900678 0.148703 0.394338 0.105662
+0.889024 0.207291 0.386583 0.131227
+0.873563 0.264992 0.377172 0.15623
+0.854361 0.321559 0.366147 0.180564
+0.831501 0.376748 0.353553 0.204124
+0.80508 0.430324 0.339446 0.226811
+0.775212 0.482058 0.323885 0.248526
+0.742024 0.531727 0.306937 0.269177
+0.705659 0.579119 0.288675 0.288675
+0.666272 0.624032 0.269177 0.306937
+0.624032 0.666272 0.248526 0.323885
+0.579119 0.705659 0.226811 0.339446
+0.531727 0.742024 0.204124 0.353553
+0.482058 0.775212 0.180564 0.366147
+0.430324 0.80508 0.15623 0.377172
+0.376748 0.831501 0.131227 0.386583
+0.321559 0.854361 0.105662 0.394338
+0.264992 0.873563 0.0796453 0.400404
+0.207291 0.889024 0.0532871 0.404756
+0.148702 0.900678 0.0267007 0.407374
+0.0894769 0.908475 -5.01883e-08 0.408248
+0.0298682 0.912382 -0.0267008 0.407374
+-0.0298684 0.912382 -0.0532871 0.404756
+-0.0894771 0.908475 -0.0796453 0.400404
+-0.148703 0.900678 -0.105662 0.394338
+-0.207291 0.889024 -0.131227 0.386583
+-0.264993 0.873563 -0.15623 0.377172
+-0.321559 0.854361 -0.180564 0.366147
+-0.376748 0.831501 -0.204124 0.353553
+-0.430324 0.80508 -0.226811 0.339446
+-0.482058 0.775212 -0.248526 0.323885
+-0.531727 0.742024 -0.269177 0.306937
+-0.579119 0.705659 -0.288675 0.288675
+-0.624032 0.666272 -0.306937 0.269177
+-0.666272 0.624032 -0.323885 0.248526
+-0.705659 0.579119 -0.339446 0.226811
+-0.742024 0.531727 -0.353553 0.204124
+-0.775212 0.482058 -0.366147 0.180564
+-0.80508 0.430324 -0.377172 0.15623
+-0.831501 0.376748 -0.386583 0.131227
+-0.854361 0.321559 -0.394338 0.105662
+-0.873563 0.264992 -0.400404 0.0796453
+-0.889024 0.207291 -0.404756 0.0532871
+-0.900678 0.148703 -0.407374 0.0267007
+-0.908475 0.089477 -0.408248 -1.93664e-08
+-0.912382 0.0298684 -0.407374 -0.0267007
+0.933521 0.0305603 0.353472 0.0515886
+0.929524 0.0915501 0.349342 0.0745963
+0.921546 0.152148 0.343715 0.0972846
+0.909622 0.212094 0.336616 0.119556
+0.893803 0.271132 0.328076 0.141316
+0.874156 0.329009 0.318131 0.162471
+0.850766 0.385477 0.306824 0.18293
+0.823733 0.440295 0.294203 0.202605
+0.793173 0.493227 0.280322 0.221413
+0.759216 0.544047 0.265241 0.239273
+0.722008 0.592537 0.249023 0.256108
+0.681709 0.63849 0.23174 0.271847
+0.63849 0.681709 0.213464 0.286421
+0.592537 0.722008 0.194274 0.299769
+0.544047 0.759216 0.174252 0.311834
+0.493227 0.793173 0.153484 0.322563
+0.440295 0.823733 0.132059 0.33191
+0.385477 0.850766 0.110068 0.339837
+0.329009 0.874156 0.0876064 0.346308
+0.271132 0.893803 0.0647692 0.351296
+0.212094 0.909622 0.0416547 0.35478
+0.152148 0.921546 0.0183617 0.356745
+0.09155 0.929524 -0.00500984 0.357182
+0.0305602 0.933521 -0.0283599 0.35609
+-0.0305604 0.933521 -0.0515886 0.353472
+-0.0915502 0.929524 -0.0745963 0.349342
+-0.152148 0.921546 -0.0972847 0.343715
+-0.212094 0.909622 -0.119556 0.336616
+-0.271132 0.893803 -0.141316 0.328076
+-0.329009 0.874156 -0.162471 0.318131
+-0.385477 0.850766 -0.18293 0.306824
+-0.440295 0.823733 -0.202605 0.294203
+-0.493227 0.793173 -0.221413 0.280322
+-0.544047 0.759216 -0.239273 0.265241
+-0.592537 0.722008 -0.256108 0.249023
+-0.63849 0.681709 -0.271847 0.23174
+-0.681709 0.63849 -0.286421 0.213464
+-0.722008 0.592537 -0.299769 0.194274
+-0.759216 0.544047 -0.311834 0.174252
+-0.793173 0.493227 -0.322563 0.153484
+-0.823733 0.440295 -0.33191 0.132059
+-0.850766 0.385477 -0.339837 0.110069
+-0.874156 0.329009 -0.346308 0.0876064
+-0.893803 0.271132 -0.351296 0.0647693
+-0.909622 0.212094 -0.35478 0.0416547
+-0.921546 0.152148 -0.356745 0.0183618
+-0.929524 0.0915501 -0.357182 -0.00500981
+-0.933521 0.0305604 -0.35609 -0.0283599
+0.933521 0.0305603 0.333131 0.12895
+0.929524 0.0915501 0.323984 0.150462
+0.921546 0.152148 0.313449 0.171329
+0.909622 0.212094 0.301573 0.191463
+0.893803 0.271132 0.288405 0.210777
+0.874156 0.329009 0.274002 0.229188
+0.850766 0.385477 0.258425 0.246618
+0.823733 0.440295 0.241743 0.262992
+0.793173 0.493227 0.224025 0.278239
+0.759216 0.544047 0.205347 0.292296
+0.722008 0.592537 0.18579 0.3051
+0.681709 0.63849 0.165438 0.316598
+0.63849 0.681709 0.144377 0.32674
+0.592537 0.722008 0.122698 0.335484
+0.544047 0.759216 0.100494 0.34279
+0.493227 0.793173 0.0778593 0.348629
+0.440295 0.823733 0.0548912 0.352975
+0.385477 0.850766 0.031688 0.355809
+0.329009 0.874156 0.00834915 0.35712
+0.271132 0.893803 -0.0150255 0.356901
+0.212094 0.909622 -0.0383357 0.355154
+0.152148 0.921546 -0.0614819 0.351887
+0.09155 0.929524 -0.0843647 0.347112
+0.0305602 0.933521 -0.106886 0.340851
+-0.0305604 0.933521 -0.12895 0.333131
+-0.0915502 0.929524 -0.150462 0.323984
+-0.152148 0.921546 -0.171329 0.313449
+-0.212094 0.909622 -0.191463 0.301573
+-0.271132 0.893803 -0.210777 0.288405
+-0.329009 0.874156 -0.229188 0.274002
+-0.385477 0.850766 -0.246618 0.258425
+-0.440295 0.823733 -0.262992 0.241743
+-0.493227 0.793173 -0.278239 0.224025
+-0.544047 0.759216 -0.292296 0.205347
+-0.592537 0.722008 -0.3051 0.18579
+-0.63849 0.681709 -0.316598 0.165438
+-0.681709 0.63849 -0.32674 0.144377
+-0.722008 0.592537 -0.335484 0.122698
+-0.759216 0.544047 -0.34279 0.100494
+-0.793173 0.493227 -0.348629 0.0778593
+-0.823733 0.440295 -0.352975 0.0548913
+-0.850766 0.385477 -0.355809 0.031688
+-0.874156 0.329009 -0.35712 0.00834914
+-0.893803 0.271132 -0.356901 -0.0150254
+-0.909622 0.212094 -0.355154 -0.0383358
+-0.921546 0.152148 -0.351887 -0.0614818
+-0.929524 0.0915501 -0.347112 -0.0843647
+-0.933521 0.0305604 -0.340851 -0.106886
+0.951462 0.0311476 0.278894 0.126365
+0.947388 0.0933095 0.270032 0.144335
+0.939256 0.155072 0.260014 0.161687
+0.927103 0.21617 0.248882 0.178347
+0.91098 0.276343 0.236685 0.194242
+0.890956 0.335332 0.223474 0.209307
+0.867117 0.392885 0.209307 0.223474
+0.839564 0.448756 0.194242 0.236685
+0.808416 0.502706 0.178347 0.248882
+0.773807 0.554502 0.161687 0.260014
+0.735884 0.603924 0.144335 0.270032
+0.69481 0.650761 0.126365 0.278894
+0.65076 0.69481 0.107854 0.286561
+0.603924 0.735884 0.0888812 0.293002
+0.554502 0.773807 0.0695276 0.298188
+0.502706 0.808416 0.0498763 0.302097
+0.448756 0.839564 0.0300115 0.304712
+0.392885 0.867117 0.0100181 0.306022
+0.335332 0.890956 -0.0100181 0.306022
+0.276343 0.91098 -0.0300115 0.304712
+0.21617 0.927103 -0.0498764 0.302097
+0.155072 0.939256 -0.0695277 0.298188
+0.0933094 0.947388 -0.0888812 0.293002
+0.0311475 0.951462 -0.107854 0.286561
+-0.0311477 0.951462 -0.126365 0.278894
+-0.0933096 0.947388 -0.144335 0.270032
+-0.155072 0.939256 -0.161687 0.260014
+-0.21617 0.927103 -0.178347 0.248882
+-0.276343 0.91098 -0.194243 0.236685
+-0.335332 0.890956 -0.209307 0.223474
+-0.392886 0.867116 -0.223474 0.209307
+-0.448756 0.839564 -0.236685 0.194243
+-0.502706 0.808416 -0.248882 0.178347
+-0.554502 0.773807 -0.260014 0.161687
+-0.603924 0.735884 -0.270032 0.144335
+-0.650761 0.69481 -0.278894 0.126365
+-0.69481 0.650761 -0.286561 0.107854
+-0.735884 0.603924 -0.293002 0.0888812
+-0.773807 0.554502 -0.298188 0.0695276
+-0.808416 0.502706 -0.302097 0.0498763
+-0.839564 0.448756 -0.304712 0.0300115
+-0.867117 0.392885 -0.306022 0.0100181
+-0.890956 0.335332 -0.306022 -0.0100182
+-0.91098 0.276343 -0.304712 -0.0300115
+-0.927103 0.21617 -0.302097 -0.0498764
+-0.939256 0.155072 -0.298188 -0.0695276
+-0.947388 0.0933095 -0.293002 -0.0888812
+-0.951462 0.0311477 -0.286562 -0.107854
+0.951462 0.0311476 0.302097 0.0498763
+0.947388 0.0933095 0.298188 0.0695276
+0.939256 0.155072 0.293002 0.0888812
+0.927103 0.21617 0.286561 0.107854
+0.91098 0.276343 0.278894 0.126365
+0.890956 0.335332 0.270032 0.144335
+0.867117 0.392885 0.260014 0.161687
+0.839564 0.448756 0.248882 0.178347
+0.808416 0.502706 0.236685 0.194242
+0.773807 0.554502 0.223474 0.209307
+0.735884 0.603924 0.209307 0.223474
+0.69481 0.650761 0.194242 0.236685
+0.65076 0.69481 0.178347 0.248882
+0.603924 0.735884 0.161687 0.260014
+0.554502 0.773807 0.144335 0.270032
+0.502706 0.808416 0.126365 0.278894
+0.448756 0.839564 0.107854 0.286562
+0.392885 0.867117 0.0888811 0.293002
+0.335332 0.890956 0.0695276 0.298188
+0.276343 0.91098 0.0498763 0.302097
+0.21617 0.927103 0.0300115 0.304712
+0.155072 0.939256 0.0100181 0.306022
+0.0933094 0.947388 -0.0100182 0.306022
+0.0311475 0.951462 -0.0300115 0.304712
+-0.0311477 0.951462 -0.0498764 0.302097
+-0.0933096 0.947388 -0.0695276 0.298188
+-0.155072 0.939256 -0.0888812 0.293002
+-0.21617 0.927103 -0.107854 0.286561
+-0.276343 0.91098 -0.126365 0.278894
+-0.335332 0.890956 -0.144335 0.270032
+-0.392886 0.867116 -0.161687 0.260014
+-0.448756 0.839564 -0.178347 0.248882
+-0.502706 0.808416 -0.194242 0.236685
+-0.554502 0.773807 -0.209307 0.223474
+-0.603924 0.735884 -0.223474 0.209307
+-0.650761 0.69481 -0.236685 0.194242
+-0.69481 0.650761 -0.248882 0.178347
+-0.735884 0.603924 -0.260014 0.161687
+-0.773807 0.554502 -0.270032 0.144335
+-0.808416 0.502706 -0.278894 0.126365
+-0.839564 0.448756 -0.286561 0.107854
+-0.867117 0.392885 -0.293002 0.0888812
+-0.890956 0.335332 -0.298188 0.0695276
+-0.91098 0.276343 -0.302097 0.0498764
+-0.927103 0.21617 -0.304712 0.0300115
+-0.939256 0.155072 -0.306022 0.0100182
+-0.947388 0.0933095 -0.306022 -0.0100181
+-0.951462 0.0311477 -0.304712 -0.0300115
+0.966382 0.0316361 0.250573 0.0481394
+0.962244 0.0947728 0.246888 0.0644245
+0.953986 0.157504 0.242146 0.0804338
+0.941642 0.21956 0.236367 0.0960987
+0.925266 0.280676 0.229575 0.111352
+0.904928 0.340591 0.221801 0.126129
+0.880714 0.399046 0.213077 0.140365
+0.85273 0.455794 0.20344 0.154
+0.821094 0.510589 0.192933 0.166976
+0.785942 0.563198 0.181599 0.179237
+0.747424 0.613395 0.169487 0.190731
+0.705706 0.660965 0.15665 0.201407
+0.660965 0.705706 0.143142 0.211221
+0.613395 0.747424 0.129021 0.220131
+0.563198 0.785942 0.114348 0.228098
+0.510589 0.821094 0.0991844 0.235089
+0.455793 0.85273 0.0835965 0.241072
+0.399046 0.880714 0.0676507 0.246023
+0.340591 0.904928 0.0514151 0.249921
+0.280676 0.925266 0.0349594 0.252749
+0.21956 0.941642 0.018354 0.254494
+0.157504 0.953986 0.00166994 0.25515
+0.0947727 0.962244 -0.0150212 0.254713
+0.031636 0.966382 -0.0316481 0.253185
+-0.0316362 0.966382 -0.0481394 0.250573
+-0.0947729 0.962244 -0.0644246 0.246888
+-0.157504 0.953986 -0.0804339 0.242146
+-0.21956 0.941642 -0.0960987 0.236367
+-0.280676 0.925266 -0.111352 0.229575
+-0.340591 0.904927 -0.126129 0.221801
+-0.399047 0.880714 -0.140365 0.213077
+-0.455793 0.85273 -0.154 0.203441
+-0.510589 0.821094 -0.166976 0.192933
+-0.563198 0.785941 -0.179237 0.181599
+-0.613395 0.747424 -0.190731 0.169487
+-0.660966 0.705706 -0.201407 0.15665
+-0.705706 0.660966 -0.211221 0.143142
+-0.747424 0.613395 -0.220131 0.129021
+-0.785942 0.563198 -0.228098 0.114348
+-0.821094 0.510589 -0.235089 0.0991844
+-0.85273 0.455794 -0.241072 0.0835966
+-0.880714 0.399046 -0.246023 0.0676507
+-0.904928 0.340591 -0.249921 0.0514151
+-0.925266 0.280676 -0.252749 0.0349594
+-0.941642 0.21956 -0.254494 0.018354
+-0.953986 0.157504 -0.25515 0.00166999
+-0.962244 0.0947727 -0.254713 -0.0150212
+-0.966382 0.0316362 -0.253185 -0.031648
+0.933521 0.0305603 0.244191 0.26072
+0.929524 0.0915501 0.226616 0.276133
+0.921546 0.152148 0.208071 0.290363
+0.909622 0.212094 0.188635 0.30335
+0.893803 0.271132 0.168391 0.315037
+0.874156 0.329009 0.147426 0.325376
+0.850766 0.385477 0.12583 0.334322
+0.823733 0.440295 0.103695 0.341836
+0.793173 0.493227 0.0811156 0.347886
+0.759216 0.544047 0.0581891 0.352446
+0.722008 0.592537 0.0350134 0.355497
+0.681709 0.63849 0.0116878 0.357026
+0.63849 0.681709 -0.0116878 0.357026
+0.592537 0.722008 -0.0350134 0.355497
+0.544047 0.759216 -0.0581891 0.352446
+0.493227 0.793173 -0.0811156 0.347886
+0.440295 0.823733 -0.103695 0.341836
+0.385477 0.850766 -0.12583 0.334322
+0.329009 0.874156 -0.147426 0.325376
+0.271132 0.893803 -0.168391 0.315037
+0.212094 0.909622 -0.188635 0.30335
+0.152148 0.921546 -0.208071 0.290363
+0.09155 0.929524 -0.226616 0.276133
+0.0305602 0.933521 -0.244191 0.26072
+-0.0305604 0.933521 -0.26072 0.244191
+-0.0915502 0.929524 -0.276133 0.226616
+-0.152148 0.921546 -0.290363 0.208071
+-0.212094 0.909622 -0.30335 0.188635
+-0.271132 0.893803 -0.315038 0.168391
+-0.329009 0.874156 -0.325376 0.147426
+-0.385477 0.850766 -0.334322 0.12583
+-0.440295 0.823733 -0.341836 0.103695
+-0.493227 0.793173 -0.347886 0.0811156
+-0.544047 0.759216 -0.352446 0.058189
+-0.592537 0.722008 -0.355497 0.0350134
+-0.63849 0.681709 -0.357026 0.0116878
+-0.681709 0.63849 -0.357026 -0.0116878
+-0.722008 0.592537 -0.355497 -0.0350134
+-0.759216 0.544047 -0.352446 -0.058189
+-0.793173 0.493227 -0.347886 -0.0811156
+-0.823733 0.440295 -0.341836 -0.103695
+-0.850766 0.385477 -0.334322 -0.12583
+-0.874156 0.329009 -0.325376 -0.147426
+-0.893803 0.271132 -0.315037 -0.168391
+-0.909622 0.212094 -0.30335 -0.188635
+-0.921546 0.152148 -0.290363 -0.208071
+-0.929524 0.0915501 -0.276133 -0.226616
+-0.933521 0.0305604 -0.26072 -0.244191
+0.951462 0.0311476 0.178347 0.248882
+0.947388 0.0933095 0.161687 0.260014
+0.939256 0.155072 0.144335 0.270032
+0.927103 0.21617 0.126365 0.278894
+0.91098 0.276343 0.107854 0.286561
+0.890956 0.335332 0.0888812 0.293002
+0.867117 0.392885 0.0695276 0.298188
+0.839564 0.448756 0.0498763 0.302097
+0.808416 0.502706 0.0300115 0.304712
+0.773807 0.554502 0.0100181 0.306022
+0.735884 0.603924 -0.0100181 0.306022
+0.69481 0.650761 -0.0300115 0.304712
+0.65076 0.69481 -0.0498764 0.302097
+0.603924 0.735884 -0.0695276 0.298188
+0.554502 0.773807 -0.0888812 0.293002
+0.502706 0.808416 -0.107854 0.286561
+0.448756 0.839564 -0.126365 0.278894
+0.392885 0.867117 -0.144335 0.270032
+0.335332 0.890956 -0.161687 0.260014
+0.276343 0.91098 -0.178347 0.248882
+0.21617 0.927103 -0.194242 0.236685
+0.155072 0.939256 -0.209307 0.223474
+0.0933094 0.947388 -0.223474 0.209307
+0.0311475 0.951462 -0.236685 0.194242
+-0.0311477 0.951462 -0.248882 0.178347
+-0.0933096 0.947388 -0.260014 0.161687
+-0.155072 0.939256 -0.270032 0.144335
+-0.21617 0.927103 -0.278894 0.126365
+-0.276343 0.91098 -0.286562 0.107854
+-0.335332 0.890956 -0.293002 0.0888811
+-0.392886 0.867116 -0.298188 0.0695276
+-0.448756 0.839564 -0.302097 0.0498764
+-0.502706 0.808416 -0.304712 0.0300115
+-0.554502 0.773807 -0.306022 0.0100181
+-0.603924 0.735884 -0.306022 -0.0100181
+-0.650761 0.69481 -0.304712 -0.0300115
+-0.69481 0.650761 -0.302097 -0.0498763
+-0.735884 0.603924 -0.298188 -0.0695276
+-0.773807 0.554502 -0.293002 -0.0888811
+-0.808416 0.502706 -0.286561 -0.107854
+-0.839564 0.448756 -0.278894 -0.126365
+-0.867117 0.392885 -0.270032 -0.144335
+-0.890956 0.335332 -0.260014 -0.161687
+-0.91098 0.276343 -0.248882 -0.178347
+-0.927103 0.21617 -0.236685 -0.194242
+-0.939256 0.155072 -0.223474 -0.209307
+-0.947388 0.0933095 -0.209307 -0.223474
+-0.951462 0.0311477 -0.194243 -0.236685
+0.951462 0.0311476 0.236685 0.194242
+0.947388 0.0933095 0.223474 0.209307
+0.939256 0.155072 0.209307 0.223474
+0.927103 0.21617 0.194242 0.236685
+0.91098 0.276343 0.178347 0.248882
+0.890956 0.335332 0.161687 0.260014
+0.867117 0.392885 0.144335 0.270032
+0.839564 0.448756 0.126365 0.278894
+0.808416 0.502706 0.107854 0.286561
+0.773807 0.554502 0.0888812 0.293002
+0.735884 0.603924 0.0695276 0.298188
+0.69481 0.650761 0.0498763 0.302097
+0.65076 0.69481 0.0300115 0.304712
+0.603924 0.735884 0.0100181 0.306022
+0.554502 0.773807 -0.0100181 0.306022
+0.502706 0.808416 -0.0300115 0.304712
+0.448756 0.839564 -0.0498764 0.302097
+0.392885 0.867117 -0.0695276 0.298188
+0.335332 0.890956 -0.0888812 0.293002
+0.276343 0.91098 -0.107854 0.286561
+0.21617 0.927103 -0.126365 0.278894
+0.155072 0.939256 -0.144335 0.270032
+0.0933094 0.947388 -0.161687 0.260014
+0.0311475 0.951462 -0.178347 0.248882
+-0.0311477 0.951462 -0.194243 0.236685
+-0.0933096 0.947388 -0.209307 0.223474
+-0.155072 0.939256 -0.223474 0.209307
+-0.21617 0.927103 -0.236685 0.194242
+-0.276343 0.91098 -0.248882 0.178347
+-0.335332 0.890956 -0.260014 0.161687
+-0.392886 0.867116 -0.270032 0.144335
+-0.448756 0.839564 -0.278894 0.126365
+-0.502706 0.808416 -0.286561 0.107854
+-0.554502 0.773807 -0.293002 0.0888811
+-0.603924 0.735884 -0.298188 0.0695276
+-0.650761 0.69481 -0.302097 0.0498763
+-0.69481 0.650761 -0.304712 0.0300115
+-0.735884 0.603924 -0.306022 0.0100181
+-0.773807 0.554502 -0.306022 -0.0100181
+-0.808416 0.502706 -0.304712 -0.0300115
+-0.839564 0.448756 -0.302097 -0.0498763
+-0.867117 0.392885 -0.298188 -0.0695276
+-0.890956 0.335332 -0.293002 -0.0888812
+-0.91098 0.276343 -0.286561 -0.107854
+-0.927103 0.21617 -0.278894 -0.126365
+-0.939256 0.155072 -0.270032 -0.144335
+-0.947388 0.0933095 -0.260014 -0.161687
+-0.951462 0.0311477 -0.248882 -0.178347
+0.966382 0.0316361 0.174422 0.186229
+0.962244 0.0947728 0.161869 0.197238
+0.953986 0.157504 0.148622 0.207402
+0.941642 0.21956 0.134739 0.216678
+0.925266 0.280676 0.120279 0.225027
+0.904928 0.340591 0.105304 0.232412
+0.880714 0.399046 0.0898784 0.238801
+0.85273 0.455794 0.0740676 0.244168
+0.821094 0.510589 0.0579397 0.24849
+0.785942 0.563198 0.0415636 0.251747
+0.747424 0.613395 0.0250096 0.253927
+0.705706 0.660965 0.00834844 0.255019
+0.660965 0.705706 -0.00834845 0.255019
+0.613395 0.747424 -0.0250096 0.253927
+0.563198 0.785942 -0.0415636 0.251747
+0.510589 0.821094 -0.0579397 0.24849
+0.455793 0.85273 -0.0740677 0.244168
+0.399046 0.880714 -0.0898784 0.238801
+0.340591 0.904928 -0.105304 0.232412
+0.280676 0.925266 -0.120279 0.225027
+0.21956 0.941642 -0.134739 0.216678
+0.157504 0.953986 -0.148622 0.207402
+0.0947727 0.962244 -0.161869 0.197238
+0.031636 0.966382 -0.174422 0.186229
+-0.0316362 0.966382 -0.186229 0.174422
+-0.0947729 0.962244 -0.197238 0.161869
+-0.157504 0.953986 -0.207402 0.148622
+-0.21956 0.941642 -0.216678 0.134739
+-0.280676 0.925266 -0.225027 0.120279
+-0.340591 0.904927 -0.232412 0.105304
+-0.399047 0.880714 -0.238801 0.0898784
+-0.455793 0.85273 -0.244168 0.0740677
+-0.510589 0.821094 -0.24849 0.0579397
+-0.563198 0.785941 -0.251747 0.0415636
+-0.613395 0.747424 -0.253927 0.0250096
+-0.660966 0.705706 -0.255019 0.00834843
+-0.705706 0.660966 -0.255019 -0.00834843
+-0.747424 0.613395 -0.253927 -0.0250096
+-0.785942 0.563198 -0.251747 -0.0415636
+-0.821094 0.510589 -0.24849 -0.0579397
+-0.85273 0.455794 -0.244168 -0.0740676
+-0.880714 0.399046 -0.238801 -0.0898784
+-0.904928 0.340591 -0.232412 -0.105304
+-0.925266 0.280676 -0.225027 -0.120279
+-0.941642 0.21956 -0.216678 -0.134739
+-0.953986 0.157504 -0.207402 -0.148622
+-0.962244 0.0947727 -0.197238 -0.161869
+-0.966382 0.0316362 -0.186229 -0.174422
+0.966382 0.0316361 0.108337 0.231013
+0.962244 0.0947728 0.0929965 0.237604
+0.953986 0.157504 0.0772574 0.243178
+0.941642 0.21956 0.0611873 0.24771
+0.925266 0.280676 0.0448553 0.251182
+0.904928 0.340591 0.0283312 0.253577
+0.880714 0.399046 0.0116858 0.254887
+0.85273 0.455794 -0.00500964 0.255106
+0.821094 0.510589 -0.0216836 0.254232
+0.785942 0.563198 -0.0382648 0.25227
+0.747424 0.613395 -0.0546821 0.249227
+0.705706 0.660965 -0.0708652 0.245117
+0.660965 0.705706 -0.0867449 0.239957
+0.613395 0.747424 -0.102253 0.23377
+0.563198 0.785942 -0.117324 0.226582
+0.510589 0.821094 -0.131891 0.218423
+0.455793 0.85273 -0.145895 0.20933
+0.399046 0.880714 -0.159273 0.19934
+0.340591 0.904928 -0.17197 0.188496
+0.280676 0.925266 -0.18393 0.176845
+0.21956 0.941642 -0.195102 0.164437
+0.157504 0.953986 -0.205439 0.151324
+0.0947727 0.962244 -0.214896 0.137564
+0.031636 0.966382 -0.223433 0.123215
+-0.0316362 0.966382 -0.231013 0.108337
+-0.0947729 0.962244 -0.237604 0.0929965
+-0.157504 0.953986 -0.243178 0.0772573
+-0.21956 0.941642 -0.24771 0.0611873
+-0.280676 0.925266 -0.251182 0.0448553
+-0.340591 0.904927 -0.253577 0.0283312
+-0.399047 0.880714 -0.254887 0.0116858
+-0.455793 0.85273 -0.255106 -0.00500961
+-0.510589 0.821094 -0.254232 -0.0216836
+-0.563198 0.785941 -0.25227 -0.0382648
+-0.613395 0.747424 -0.249227 -0.0546821
+-0.660966 0.705706 -0.245117 -0.0708652
+-0.705706 0.660966 -0.239957 -0.0867449
+-0.747424 0.613395 -0.23377 -0.102253
+-0.785942 0.563198 -0.226582 -0.117323
+-0.821094 0.510589 -0.218423 -0.131891
+-0.85273 0.455794 -0.20933 -0.145895
+-0.880714 0.399046 -0.19934 -0.159273
+-0.904928 0.340591 -0.188496 -0.17197
+-0.925266 0.280676 -0.176845 -0.18393
+-0.941642 0.21956 -0.164437 -0.195102
+-0.953986 0.157504 -0.151324 -0.205439
+-0.962244 0.0947727 -0.137564 -0.214896
+-0.966382 0.0316362 -0.123215 -0.223433
+0.978421 0.0320302 0.0332509 0.201398
+0.974231 0.0959534 0.0200077 0.203141
+0.96587 0.159466 0.00667876 0.204015
+0.953372 0.222295 -0.00667876 0.204015
+0.936792 0.284173 -0.0200077 0.203141
+0.9162 0.344833 -0.0332509 0.201398
+0.891686 0.404017 -0.0463517 0.198792
+0.863352 0.461472 -0.0592541 0.195335
+0.831322 0.516949 -0.0719027 0.191041
+0.795732 0.570214 -0.0842435 0.185929
+0.756735 0.621036 -0.0962235 0.180021
+0.714497 0.669199 -0.107791 0.173343
+0.669199 0.714497 -0.118898 0.165922
+0.621036 0.756735 -0.129495 0.15779
+0.570214 0.795732 -0.139538 0.148983
+0.516949 0.831322 -0.148983 0.139538
+0.461471 0.863352 -0.15779 0.129495
+0.404017 0.891686 -0.165922 0.118898
+0.344833 0.9162 -0.173343 0.107791
+0.284173 0.936792 -0.180021 0.0962234
+0.222295 0.953372 -0.185929 0.0842435
+0.159466 0.96587 -0.191041 0.0719027
+0.0959533 0.974231 -0.195335 0.0592541
+0.0320301 0.978421 -0.198792 0.0463517
+-0.0320303 0.978421 -0.201398 0.0332509
+-0.0959535 0.974231 -0.203141 0.0200076
+-0.159466 0.96587 -0.204015 0.00667874
+-0.222295 0.953372 -0.204015 -0.00667877
+-0.284173 0.936792 -0.203141 -0.0200077
+-0.344834 0.9162 -0.201398 -0.0332509
+-0.404018 0.891686 -0.198792 -0.0463518
+-0.461471 0.863352 -0.195335 -0.0592541
+-0.516949 0.831322 -0.191041 -0.0719027
+-0.570214 0.795732 -0.185929 -0.0842435
+-0.621036 0.756735 -0.180021 -0.0962234
+-0.669199 0.714497 -0.173343 -0.107791
+-0.714497 0.669199 -0.165922 -0.118898
+-0.756735 0.621036 -0.15779 -0.129495
+-0.795732 0.570214 -0.148983 -0.139538
+-0.831322 0.516949 -0.139538 -0.148983
+-0.863352 0.461472 -0.129495 -0.15779
+-0.891686 0.404017 -0.118898 -0.165922
+-0.9162 0.344833 -0.107791 -0.173343
+-0.936792 0.284173 -0.0962235 -0.180021
+-0.953372 0.222295 -0.0842435 -0.185929
+-0.96587 0.159466 -0.0719028 -0.191041
+-0.974231 0.0959533 -0.0592541 -0.195335
+-0.978421 0.0320303 -0.0463518 -0.198792
+0.978421 0.0320302 0.107791 0.173343
+0.974231 0.0959534 0.0962235 0.180021
+0.96587 0.159466 0.0842435 0.185929
+0.953372 0.222295 0.0719027 0.191041
+0.936792 0.284173 0.0592541 0.195335
+0.9162 0.344833 0.0463518 0.198792
+0.891686 0.404017 0.0332509 0.201398
+0.863352 0.461472 0.0200077 0.203141
+0.831322 0.516949 0.00667875 0.204015
+0.795732 0.570214 -0.00667875 0.204015
+0.756735 0.621036 -0.0200077 0.203141
+0.714497 0.669199 -0.0332509 0.201398
+0.669199 0.714497 -0.0463518 0.198792
+0.621036 0.756735 -0.0592541 0.195335
+0.570214 0.795732 -0.0719027 0.191041
+0.516949 0.831322 -0.0842435 0.185929
+0.461471 0.863352 -0.0962235 0.180021
+0.404017 0.891686 -0.107791 0.173343
+0.344833 0.9162 -0.118898 0.165922
+0.284173 0.936792 -0.129495 0.15779
+0.222295 0.953372 -0.139538 0.148983
+0.159466 0.96587 -0.148983 0.139538
+0.0959533 0.974231 -0.15779 0.129495
+0.0320301 0.978421 -0.165922 0.118898
+-0.0320303 0.978421 -0.173343 0.107791
+-0.0959535 0.974231 -0.180021 0.0962234
+-0.159466 0.96587 -0.185929 0.0842435
+-0.222295 0.953372 -0.191041 0.0719027
+-0.284173 0.936792 -0.195335 0.0592541
+-0.344834 0.9162 -0.198792 0.0463517
+-0.404018 0.891686 -0.201398 0.0332509
+-0.461471 0.863352 -0.203141 0.0200077
+-0.516949 0.831322 -0.204015 0.00667876
+-0.570214 0.795732 -0.204015 -0.00667877
+-0.621036 0.756735 -0.203141 -0.0200077
+-0.669199 0.714497 -0.201398 -0.0332509
+-0.714497 0.669199 -0.198792 -0.0463517
+-0.756735 0.621036 -0.195335 -0.0592541
+-0.795732 0.570214 -0.191041 -0.0719027
+-0.831322 0.516949 -0.185929 -0.0842435
+-0.863352 0.461472 -0.180021 -0.0962234
+-0.891686 0.404017 -0.173343 -0.107791
+-0.9162 0.344833 -0.165922 -0.118898
+-0.936792 0.284173 -0.15779 -0.129495
+-0.953372 0.222295 -0.148983 -0.139538
+-0.96587 0.159466 -0.139538 -0.148983
+-0.974231 0.0959533 -0.129495 -0.15779
+-0.978421 0.0320303 -0.118898 -0.165922
+0.987683 0.0323334 0.0347638 0.149094
+0.983453 0.0968617 0.0249382 0.151048
+0.975013 0.160975 0.0150057 0.152356
+0.962397 0.224399 0.00500906 0.153011
+0.94566 0.286863 -0.00500907 0.153011
+0.924873 0.348098 -0.0150057 0.152356
+0.900126 0.407842 -0.0249382 0.151048
+0.871525 0.46584 -0.0347638 0.149094
+0.839192 0.521843 -0.0444406 0.146501
+0.803265 0.575611 -0.0539271 0.143281
+0.763898 0.626915 -0.0631826 0.139447
+0.72126 0.675534 -0.0721676 0.135016
+0.675534 0.72126 -0.0808436 0.130007
+0.626915 0.763898 -0.0891733 0.124441
+0.575611 0.803265 -0.0971212 0.118343
+0.521843 0.839192 -0.104653 0.111737
+0.46584 0.871525 -0.111737 0.104653
+0.407842 0.900126 -0.118343 0.0971212
+0.348098 0.924873 -0.124441 0.0891733
+0.286863 0.94566 -0.130007 0.0808435
+0.224399 0.962397 -0.135016 0.0721676
+0.160975 0.975013 -0.139447 0.0631826
+0.0968616 0.983453 -0.143281 0.053927
+0.0323333 0.987683 -0.146501 0.0444406
+-0.0323335 0.987683 -0.149094 0.0347638
+-0.0968618 0.983453 -0.151048 0.0249382
+-0.160975 0.975013 -0.152356 0.0150057
+-0.224399 0.962397 -0.153011 0.00500906
+-0.286863 0.94566 -0.153011 -0.00500909
+-0.348098 0.924873 -0.152356 -0.0150058
+-0.407842 0.900126 -0.151048 -0.0249382
+-0.46584 0.871525 -0.149094 -0.0347638
+-0.521843 0.839192 -0.146501 -0.0444406
+-0.575611 0.803265 -0.143281 -0.0539271
+-0.626915 0.763898 -0.139447 -0.0631826
+-0.675534 0.72126 -0.135016 -0.0721676
+-0.72126 0.675534 -0.130007 -0.0808435
+-0.763898 0.626915 -0.124441 -0.0891733
+-0.803265 0.575611 -0.118343 -0.0971212
+-0.839192 0.521843 -0.111737 -0.104653
+-0.871525 0.46584 -0.104653 -0.111737
+-0.900126 0.407842 -0.0971212 -0.118343
+-0.924873 0.348098 -0.0891733 -0.124441
+-0.94566 0.286863 -0.0808436 -0.130007
+-0.962397 0.224399 -0.0721676 -0.135016
+-0.975013 0.160975 -0.0631826 -0.139447
+-0.983453 0.0968616 -0.053927 -0.143281
+-0.987683 0.0323335 -0.0444406 -0.146501
+0.966382 0.0316361 0.223433 0.123215
+0.962244 0.0947728 0.214896 0.137564
+0.953986 0.157504 0.205439 0.151324
+0.941642 0.21956 0.195102 0.164437
+0.925266 0.280676 0.18393 0.176845
+0.904928 0.340591 0.17197 0.188496
+0.880714 0.399046 0.159273 0.19934
+0.85273 0.455794 0.145895 0.20933
+0.821094 0.510589 0.131891 0.218423
+0.785942 0.563198 0.117324 0.226582
+0.747424 0.613395 0.102253 0.23377
+0.705706 0.660965 0.0867449 0.239957
+0.660965 0.705706 0.0708652 0.245117
+0.613395 0.747424 0.0546821 0.249227
+0.563198 0.785942 0.0382648 0.25227
+0.510589 0.821094 0.0216836 0.254232
+0.455793 0.85273 0.00500962 0.255106
+0.399046 0.880714 -0.0116858 0.254887
+0.340591 0.904928 -0.0283312 0.253577
+0.280676 0.925266 -0.0448553 0.251182
+0.21956 0.941642 -0.0611874 0.24771
+0.157504 0.953986 -0.0772574 0.243178
+0.0947727 0.962244 -0.0929966 0.237604
+0.031636 0.966382 -0.108338 0.231013
+-0.0316362 0.966382 -0.123215 0.223433
+-0.0947729 0.962244 -0.137564 0.214896
+-0.157504 0.953986 -0.151324 0.205439
+-0.21956 0.941642 -0.164437 0.195102
+-0.280676 0.925266 -0.176845 0.18393
+-0.340591 0.904927 -0.188496 0.171969
+-0.399047 0.880714 -0.19934 0.159273
+-0.455793 0.85273 -0.20933 0.145895
+-0.510589 0.821094 -0.218423 0.131891
+-0.563198 0.785941 -0.226582 0.117323
+-0.613395 0.747424 -0.23377 0.102253
+-0.660966 0.705706 -0.239957 0.0867449
+-0.705706 0.660966 -0.245117 0.0708652
+-0.747424 0.613395 -0.249227 0.0546821
+-0.785942 0.563198 -0.25227 0.0382648
+-0.821094 0.510589 -0.254232 0.0216836
+-0.85273 0.455794 -0.255106 0.00500967
+-0.880714 0.399046 -0.254887 -0.0116858
+-0.904928 0.340591 -0.253577 -0.0283313
+-0.925266 0.280676 -0.251182 -0.0448553
+-0.941642 0.21956 -0.24771 -0.0611874
+-0.953986 0.157504 -0.243178 -0.0772573
+-0.962244 0.0947727 -0.237604 -0.0929965
+-0.966382 0.0316362 -0.231013 -0.108337
+0.978421 0.0320302 0.165922 0.118898
+0.974231 0.0959534 0.15779 0.129495
+0.96587 0.159466 0.148983 0.139538
+0.953372 0.222295 0.139538 0.148983
+0.936792 0.284173 0.129495 0.15779
+0.9162 0.344833 0.118898 0.165922
+0.891686 0.404017 0.107791 0.173343
+0.863352 0.461472 0.0962235 0.180021
+0.831322 0.516949 0.0842435 0.185929
+0.795732 0.570214 0.0719027 0.191041
+0.756735 0.621036 0.0592541 0.195335
+0.714497 0.669199 0.0463517 0.198792
+0.669199 0.714497 0.0332509 0.201398
+0.621036 0.756735 0.0200077 0.203141
+0.570214 0.795732 0.00667874 0.204015
+0.516949 0.831322 -0.00667877 0.204015
+0.461471 0.863352 -0.0200077 0.203141
+0.404017 0.891686 -0.0332509 0.201398
+0.344833 0.9162 -0.0463518 0.198792
+0.284173 0.936792 -0.0592541 0.195335
+0.222295 0.953372 -0.0719027 0.191041
+0.159466 0.96587 -0.0842435 0.185929
+0.0959533 0.974231 -0.0962235 0.180021
+0.0320301 0.978421 -0.107791 0.173343
+-0.0320303 0.978421 -0.118898 0.165922
+-0.0959535 0.974231 -0.129495 0.15779
+-0.159466 0.96587 -0.139538 0.148983
+-0.222295 0.953372 -0.148983 0.139538
+-0.284173 0.936792 -0.15779 0.129495
+-0.344834 0.9162 -0.165922 0.118898
+-0.404018 0.891686 -0.173343 0.107791
+-0.461471 0.863352 -0.180021 0.0962235
+-0.516949 0.831322 -0.185929 0.0842435
+-0.570214 0.795732 -0.191041 0.0719027
+-0.621036 0.756735 -0.195335 0.0592541
+-0.669199 0.714497 -0.198792 0.0463517
+-0.714497 0.669199 -0.201398 0.0332509
+-0.756735 0.621036 -0.203141 0.0200077
+-0.795732 0.570214 -0.204015 0.00667877
+-0.831322 0.516949 -0.204015 -0.00667876
+-0.863352 0.461472 -0.203141 -0.0200076
+-0.891686 0.404017 -0.201398 -0.0332509
+-0.9162 0.344833 -0.198792 -0.0463518
+-0.936792 0.284173 -0.195335 -0.0592541
+-0.953372 0.222295 -0.191041 -0.0719028
+-0.96587 0.159466 -0.185929 -0.0842435
+-0.974231 0.0959533 -0.180021 -0.0962235
+-0.978421 0.0320303 -0.173343 -0.107791
+0.978421 0.0320302 0.198792 0.0463517
+0.974231 0.0959534 0.195335 0.0592541
+0.96587 0.159466 0.191041 0.0719027
+0.953372 0.222295 0.185929 0.0842435
+0.936792 0.284173 0.180021 0.0962235
+0.9162 0.344833 0.173343 0.107791
+0.891686 0.404017 0.165922 0.118898
+0.863352 0.461472 0.15779 0.129495
+0.831322 0.516949 0.148983 0.139538
+0.795732 0.570214 0.139538 0.148983
+0.756735 0.621036 0.129495 0.15779
+0.714497 0.669199 0.118898 0.165922
+0.669199 0.714497 0.107791 0.173343
+0.621036 0.756735 0.0962235 0.180021
+0.570214 0.795732 0.0842435 0.185929
+0.516949 0.831322 0.0719027 0.191041
+0.461471 0.863352 0.0592541 0.195335
+0.404017 0.891686 0.0463517 0.198792
+0.344833 0.9162 0.0332509 0.201398
+0.284173 0.936792 0.0200077 0.203141
+0.222295 0.953372 0.00667875 0.204015
+0.159466 0.96587 -0.00667878 0.204015
+0.0959533 0.974231 -0.0200077 0.203141
+0.0320301 0.978421 -0.0332509 0.201398
+-0.0320303 0.978421 -0.0463518 0.198792
+-0.0959535 0.974231 -0.0592541 0.195335
+-0.159466 0.96587 -0.0719028 0.191041
+-0.222295 0.953372 -0.0842435 0.185929
+-0.284173 0.936792 -0.0962235 0.180021
+-0.344834 0.9162 -0.107791 0.173343
+-0.404018 0.891686 -0.118898 0.165922
+-0.461471 0.863352 -0.129495 0.15779
+-0.516949 0.831322 -0.139538 0.148983
+-0.570214 0.795732 -0.148983 0.139538
+-0.621036 0.756735 -0.15779 0.129495
+-0.669199 0.714497 -0.165922 0.118898
+-0.714497 0.669199 -0.173343 0.107791
+-0.756735 0.621036 -0.180021 0.0962234
+-0.795732 0.570214 -0.185929 0.0842435
+-0.831322 0.516949 -0.191041 0.0719027
+-0.863352 0.461472 -0.195335 0.0592541
+-0.891686 0.404017 -0.198792 0.0463517
+-0.9162 0.344833 -0.201398 0.0332509
+-0.936792 0.284173 -0.203141 0.0200077
+-0.953372 0.222295 -0.204015 0.00667874
+-0.96587 0.159466 -0.204015 -0.00667874
+-0.974231 0.0959533 -0.203141 -0.0200077
+-0.978421 0.0320303 -0.201398 -0.0332509
+0.987683 0.0323334 0.146501 0.0444406
+0.983453 0.0968617 0.143281 0.0539271
+0.975013 0.160975 0.139447 0.0631826
+0.962397 0.224399 0.135016 0.0721676
+0.94566 0.286863 0.130007 0.0808435
+0.924873 0.348098 0.124441 0.0891733
+0.900126 0.407842 0.118343 0.0971212
+0.871525 0.46584 0.111737 0.104653
+0.839192 0.521843 0.104653 0.111737
+0.803265 0.575611 0.0971212 0.118343
+0.763898 0.626915 0.0891733 0.124441
+0.72126 0.675534 0.0808435 0.130007
+0.675534 0.72126 0.0721676 0.135016
+0.626915 0.763898 0.0631826 0.139447
+0.575611 0.803265 0.053927 0.143281
+0.521843 0.839192 0.0444406 0.146501
+0.46584 0.871525 0.0347638 0.149094
+0.407842 0.900126 0.0249382 0.151048
+0.348098 0.924873 0.0150057 0.152356
+0.286863 0.94566 0.00500906 0.153011
+0.224399 0.962397 -0.00500907 0.153011
+0.160975 0.975013 -0.0150058 0.152356
+0.0968616 0.983453 -0.0249382 0.151048
+0.0323333 0.987683 -0.0347638 0.149094
+-0.0323335 0.987683 -0.0444406 0.146501
+-0.0968618 0.983453 -0.0539271 0.143281
+-0.160975 0.975013 -0.0631826 0.139447
+-0.224399 0.962397 -0.0721676 0.135016
+-0.286863 0.94566 -0.0808436 0.130007
+-0.348098 0.924873 -0.0891733 0.124441
+-0.407842 0.900126 -0.0971213 0.118343
+-0.46584 0.871525 -0.104653 0.111737
+-0.521843 0.839192 -0.111737 0.104653
+-0.575611 0.803265 -0.118343 0.0971212
+-0.626915 0.763898 -0.124441 0.0891733
+-0.675534 0.72126 -0.130007 0.0808435
+-0.72126 0.675534 -0.135016 0.0721676
+-0.763898 0.626915 -0.139447 0.0631826
+-0.803265 0.575611 -0.143281 0.0539271
+-0.839192 0.521843 -0.146501 0.0444406
+-0.871525 0.46584 -0.149094 0.0347638
+-0.900126 0.407842 -0.151048 0.0249382
+-0.924873 0.348098 -0.152356 0.0150057
+-0.94566 0.286863 -0.153011 0.00500907
+-0.962397 0.224399 -0.153011 -0.00500908
+-0.975013 0.160975 -0.152356 -0.0150057
+-0.983453 0.0968616 -0.151048 -0.0249382
+-0.987683 0.0323335 -0.149094 -0.0347638
+0.987683 0.0323334 0.104653 0.111737
+0.983453 0.0968617 0.0971212 0.118343
+0.975013 0.160975 0.0891733 0.124441
+0.962397 0.224399 0.0808435 0.130007
+0.94566 0.286863 0.0721676 0.135016
+0.924873 0.348098 0.0631826 0.139447
+0.900126 0.407842 0.0539271 0.143281
+0.871525 0.46584 0.0444406 0.146501
+0.839192 0.521843 0.0347638 0.149094
+0.803265 0.575611 0.0249382 0.151048
+0.763898 0.626915 0.0150058 0.152356
+0.72126 0.675534 0.00500906 0.153011
+0.675534 0.72126 -0.00500907 0.153011
+0.626915 0.763898 -0.0150057 0.152356
+0.575611 0.803265 -0.0249382 0.151048
+0.521843 0.839192 -0.0347638 0.149094
+0.46584 0.871525 -0.0444406 0.146501
+0.407842 0.900126 -0.0539271 0.143281
+0.348098 0.924873 -0.0631826 0.139447
+0.286863 0.94566 -0.0721676 0.135016
+0.224399 0.962397 -0.0808436 0.130007
+0.160975 0.975013 -0.0891733 0.124441
+0.0968616 0.983453 -0.0971213 0.118343
+0.0323333 0.987683 -0.104653 0.111737
+-0.0323335 0.987683 -0.111737 0.104653
+-0.0968618 0.983453 -0.118343 0.0971212
+-0.160975 0.975013 -0.124441 0.0891733
+-0.224399 0.962397 -0.130007 0.0808435
+-0.286863 0.94566 -0.135016 0.0721676
+-0.348098 0.924873 -0.139447 0.0631826
+-0.407842 0.900126 -0.143281 0.053927
+-0.46584 0.871525 -0.146501 0.0444406
+-0.521843 0.839192 -0.149094 0.0347638
+-0.575611 0.803265 -0.151048 0.0249382
+-0.626915 0.763898 -0.152356 0.0150058
+-0.675534 0.72126 -0.153011 0.00500906
+-0.72126 0.675534 -0.153011 -0.00500906
+-0.763898 0.626915 -0.152356 -0.0150058
+-0.803265 0.575611 -0.151048 -0.0249382
+-0.839192 0.521843 -0.149094 -0.0347638
+-0.871525 0.46584 -0.146501 -0.0444406
+-0.900126 0.407842 -0.143281 -0.0539271
+-0.924873 0.348098 -0.139447 -0.0631826
+-0.94566 0.286863 -0.135016 -0.0721676
+-0.962397 0.224399 -0.130007 -0.0808436
+-0.975013 0.160975 -0.124441 -0.0891733
+-0.983453 0.0968616 -0.118343 -0.0971212
+-0.987683 0.0323335 -0.111737 -0.104653
+0.994245 0.0325482 0.0359514 0.0955205
+0.989988 0.0975053 0.0296271 0.0976673
+0.981491 0.162045 0.0231759 0.0993959
+0.968791 0.22589 0.0166254 0.100699
+0.951943 0.288769 0.0100038 0.101571
+0.931019 0.350411 0.00333938 0.102007
+0.906107 0.410552 -0.00333938 0.102007
+0.877316 0.468935 -0.0100038 0.101571
+0.844768 0.52531 -0.0166255 0.100699
+0.808602 0.579436 -0.0231759 0.0993959
+0.768974 0.63108 -0.0296271 0.0976673
+0.726053 0.680023 -0.0359514 0.0955205
+0.680023 0.726053 -0.0421217 0.0929646
+0.631081 0.768974 -0.0481117 0.0900107
+0.579436 0.808602 -0.0538957 0.0866713
+0.52531 0.844768 -0.0594489 0.0829608
+0.468935 0.877316 -0.0647475 0.078895
+0.410552 0.906107 -0.0697689 0.0744914
+0.350411 0.931019 -0.0744914 0.0697688
+0.288769 0.951943 -0.0788951 0.0647475
+0.22589 0.968791 -0.0829608 0.0594489
+0.162045 0.981491 -0.0866713 0.0538957
+0.0975052 0.989988 -0.0900107 0.0481117
+0.0325481 0.994245 -0.0929647 0.0421217
+-0.0325483 0.994245 -0.0955205 0.0359514
+-0.0975054 0.989988 -0.0976673 0.029627
+-0.162045 0.981491 -0.0993959 0.0231759
+-0.225891 0.968791 -0.100699 0.0166254
+-0.288769 0.951943 -0.101571 0.0100038
+-0.350411 0.931019 -0.102007 0.00333936
+-0.410552 0.906107 -0.102007 -0.00333939
+-0.468935 0.877316 -0.101571 -0.0100038
+-0.52531 0.844768 -0.100699 -0.0166254
+-0.579436 0.808602 -0.0993959 -0.0231759
+-0.63108 0.768974 -0.0976673 -0.0296271
+-0.680023 0.726053 -0.0955205 -0.0359514
+-0.726053 0.680023 -0.0929647 -0.0421217
+-0.768974 0.63108 -0.0900107 -0.0481117
+-0.808602 0.579436 -0.0866713 -0.0538957
+-0.844768 0.52531 -0.0829608 -0.0594489
+-0.877316 0.468935 -0.0788951 -0.0647475
+-0.906107 0.410552 -0.0744914 -0.0697688
+-0.931019 0.350411 -0.0697688 -0.0744914
+-0.951943 0.288769 -0.0647475 -0.078895
+-0.968791 0.22589 -0.0594489 -0.0829608
+-0.981491 0.162045 -0.0538957 -0.0866713
+-0.989988 0.0975053 -0.0481117 -0.0900107
+-0.994245 0.0325483 -0.0421217 -0.0929646
+0.994245 0.0325482 0.0929646 0.0421217
+0.989988 0.0975053 0.0900107 0.0481117
+0.981491 0.162045 0.0866713 0.0538957
+0.968791 0.22589 0.0829608 0.0594489
+0.951943 0.288769 0.078895 0.0647475
+0.931019 0.350411 0.0744914 0.0697688
+0.906107 0.410552 0.0697688 0.0744914
+0.877316 0.468935 0.0647475 0.0788951
+0.844768 0.52531 0.0594489 0.0829608
+0.808602 0.579436 0.0538957 0.0866713
+0.768974 0.63108 0.0481117 0.0900107
+0.726053 0.680023 0.0421217 0.0929647
+0.680023 0.726053 0.0359514 0.0955205
+0.631081 0.768974 0.0296271 0.0976673
+0.579436 0.808602 0.0231759 0.0993959
+0.52531 0.844768 0.0166254 0.100699
+0.468935 0.877316 0.0100038 0.101571
+0.410552 0.906107 0.00333937 0.102007
+0.350411 0.931019 -0.00333938 0.102007
+0.288769 0.951943 -0.0100038 0.101571
+0.22589 0.968791 -0.0166255 0.100699
+0.162045 0.981491 -0.0231759 0.0993959
+0.0975052 0.989988 -0.0296271 0.0976673
+0.0325481 0.994245 -0.0359514 0.0955205
+-0.0325483 0.994245 -0.0421217 0.0929646
+-0.0975054 0.989988 -0.0481117 0.0900107
+-0.162045 0.981491 -0.0538957 0.0866713
+-0.225891 0.968791 -0.0594489 0.0829608
+-0.288769 0.951943 -0.0647475 0.078895
+-0.350411 0.931019 -0.0697689 0.0744914
+-0.410552 0.906107 -0.0744914 0.0697688
+-0.468935 0.877316 -0.078895 0.0647475
+-0.52531 0.844768 -0.0829608 0.0594489
+-0.579436 0.808602 -0.0866713 0.0538957
+-0.63108 0.768974 -0.0900107 0.0481117
+-0.680023 0.726053 -0.0929647 0.0421217
+-0.726053 0.680023 -0.0955205 0.0359514
+-0.768974 0.63108 -0.0976673 0.0296271
+-0.808602 0.579436 -0.0993959 0.0231759
+-0.844768 0.52531 -0.100699 0.0166254
+-0.877316 0.468935 -0.101571 0.0100038
+-0.906107 0.410552 -0.102007 0.00333938
+-0.931019 0.350411 -0.102007 -0.00333939
+-0.951943 0.288769 -0.101571 -0.0100038
+-0.968791 0.22589 -0.100699 -0.0166255
+-0.981491 0.162045 -0.0993959 -0.0231759
+-0.989988 0.0975053 -0.0976673 -0.0296271
+-0.994245 0.0325483 -0.0955205 -0.0359514
+0.998162 0.0326765 0.0348844 0.0372457
+0.993888 0.0978894 0.0323737 0.0394475
+0.985358 0.162683 0.0297244 0.0414804
+0.972608 0.22678 0.0269478 0.0433357
+0.955694 0.289906 0.0240559 0.0450054
+0.934687 0.351791 0.0210609 0.0464823
+0.909677 0.412169 0.0179757 0.0477602
+0.880772 0.470783 0.0148135 0.0488337
+0.848096 0.52738 0.0115879 0.049698
+0.811788 0.581719 0.00831273 0.0503494
+0.772003 0.633567 0.00500192 0.0507853
+0.728913 0.682702 0.00166969 0.0510037
+0.682702 0.728913 -0.00166969 0.0510037
+0.633567 0.772003 -0.00500192 0.0507853
+0.581719 0.811788 -0.00831273 0.0503494
+0.52738 0.848096 -0.0115879 0.049698
+0.470782 0.880772 -0.0148135 0.0488337
+0.412169 0.909677 -0.0179757 0.0477602
+0.351791 0.934687 -0.0210609 0.0464823
+0.289906 0.955694 -0.0240559 0.0450054
+0.22678 0.972608 -0.0269479 0.0433357
+0.162683 0.985358 -0.0297244 0.0414804
+0.0978893 0.993888 -0.0323738 0.0394475
+0.0326763 0.998162 -0.0348844 0.0372457
+-0.0326765 0.998162 -0.0372457 0.0348844
+-0.0978895 0.993888 -0.0394475 0.0323737
+-0.162683 0.985358 -0.0414804 0.0297244
+-0.22678 0.972608 -0.0433357 0.0269478
+-0.289907 0.955693 -0.0450054 0.0240559
+-0.351791 0.934686 -0.0464823 0.0210609
+-0.412169 0.909677 -0.0477603 0.0179757
+-0.470782 0.880772 -0.0488337 0.0148135
+-0.52738 0.848096 -0.049698 0.0115879
+-0.581719 0.811788 -0.0503494 0.00831272
+-0.633567 0.772003 -0.0507853 0.00500192
+-0.682702 0.728913 -0.0510037 0.00166969
+-0.728913 0.682702 -0.0510037 -0.00166969
+-0.772003 0.633567 -0.0507853 -0.00500192
+-0.811788 0.581719 -0.0503494 -0.00831272
+-0.848096 0.52738 -0.049698 -0.0115879
+-0.880772 0.470783 -0.0488337 -0.0148135
+-0.909677 0.412169 -0.0477602 -0.0179757
+-0.934687 0.351791 -0.0464823 -0.0210609
+-0.955693 0.289906 -0.0450054 -0.0240559
+-0.972608 0.22678 -0.0433357 -0.0269479
+-0.985358 0.162683 -0.0414804 -0.0297244
+-0.993888 0.0978894 -0.0394475 -0.0323737
+-0.998162 0.0326765 -0.0372457 -0.0348844
+0.735586 0.0240806 -0.49412 0.462794
+0.732436 0.0721387 -0.523331 0.429486
+0.72615 0.119888 -0.5503 0.394339
+0.716754 0.167124 -0.574913 0.357504
+0.704289 0.213644 -0.597064 0.319137
+0.688808 0.259249 -0.616658 0.279404
+0.670378 0.303744 -0.633611 0.238474
+0.649076 0.346939 -0.647852 0.196524
+0.624996 0.388647 -0.659318 0.153731
+0.598239 0.428692 -0.667961 0.110281
+0.56892 0.466901 -0.673743 0.0663579
+0.537165 0.50311 -0.676641 0.0221509
+0.50311 0.537165 -0.676641 -0.0221509
+0.466901 0.56892 -0.673743 -0.0663579
+0.428692 0.598239 -0.667961 -0.110281
+0.388647 0.624996 -0.659318 -0.153731
+0.346939 0.649077 -0.647852 -0.196524
+0.303744 0.670378 -0.633611 -0.238474
+0.259249 0.688808 -0.616658 -0.279404
+0.213644 0.704289 -0.597064 -0.319137
+0.167124 0.716754 -0.574913 -0.357504
+0.119888 0.72615 -0.5503 -0.394339
+0.0721386 0.732436 -0.52333 -0.429486
+0.0240805 0.735586 -0.49412 -0.462794
+-0.0240807 0.735586 -0.462794 -0.49412
+-0.0721387 0.732436 -0.429486 -0.523331
+-0.119888 0.72615 -0.394339 -0.5503
+-0.167124 0.716754 -0.357504 -0.574913
+-0.213644 0.704289 -0.319137 -0.597064
+-0.259249 0.688808 -0.279404 -0.616658
+-0.303744 0.670378 -0.238474 -0.633611
+-0.346939 0.649077 -0.196524 -0.647852
+-0.388647 0.624996 -0.153731 -0.659318
+-0.428692 0.598239 -0.110281 -0.667961
+-0.466901 0.56892 -0.0663579 -0.673743
+-0.50311 0.537165 -0.0221509 -0.676641
+-0.537165 0.50311 0.0221509 -0.676641
+-0.56892 0.466901 0.0663579 -0.673743
+-0.598239 0.428692 0.110281 -0.667961
+-0.624996 0.388647 0.153731 -0.659318
+-0.649076 0.346939 0.196524 -0.647852
+-0.670378 0.303744 0.238474 -0.633611
+-0.688808 0.259249 0.279404 -0.616658
+-0.704289 0.213644 0.319137 -0.597064
+-0.716754 0.167124 0.357504 -0.574913
+-0.72615 0.119888 0.394339 -0.5503
+-0.732436 0.0721386 0.429486 -0.523331
+-0.735586 0.0240807 0.462794 -0.49412
+0.763354 0.0249896 -0.512107 0.392954
+0.760085 0.0748618 -0.536711 0.358619
+0.753561 0.124413 -0.559017 0.322749
+0.743811 0.173432 -0.578929 0.285496
+0.730875 0.221709 -0.596362 0.247021
+0.71481 0.269035 -0.611241 0.207488
+0.695684 0.31521 -0.623502 0.167067
+0.673578 0.360035 -0.633094 0.12593
+0.648589 0.403318 -0.639975 0.0842543
+0.620822 0.444875 -0.644115 0.0422175
+0.590396 0.484526 -0.645497 1.17023e-08
+0.557443 0.522102 -0.644115 -0.0422176
+0.522102 0.557443 -0.639975 -0.0842543
+0.484526 0.590396 -0.633094 -0.12593
+0.444875 0.620822 -0.623502 -0.167067
+0.403318 0.648589 -0.611241 -0.207488
+0.360035 0.673579 -0.596362 -0.247021
+0.31521 0.695684 -0.578929 -0.285496
+0.269035 0.71481 -0.559017 -0.322749
+0.221709 0.730875 -0.536711 -0.358619
+0.173432 0.743811 -0.512107 -0.392954
+0.124413 0.753561 -0.48531 -0.425606
+0.0748617 0.760085 -0.456435 -0.456436
+0.0249895 0.763354 -0.425606 -0.485311
+-0.0249897 0.763354 -0.392954 -0.512107
+-0.0748619 0.760085 -0.358619 -0.536711
+-0.124414 0.753561 -0.322749 -0.559017
+-0.173432 0.743811 -0.285496 -0.578929
+-0.221709 0.730875 -0.247021 -0.596362
+-0.269036 0.71481 -0.207488 -0.611241
+-0.31521 0.695684 -0.167067 -0.623502
+-0.360035 0.673579 -0.12593 -0.633094
+-0.403318 0.648589 -0.0842543 -0.639975
+-0.444875 0.620822 -0.0422175 -0.644115
+-0.484526 0.590397 -2.19614e-08 -0.645497
+-0.522102 0.557443 0.0422176 -0.644115
+-0.557443 0.522102 0.0842543 -0.639975
+-0.590397 0.484526 0.12593 -0.633094
+-0.620822 0.444875 0.167067 -0.623502
+-0.648589 0.403318 0.207488 -0.611241
+-0.673578 0.360035 0.247021 -0.596362
+-0.695684 0.31521 0.285496 -0.578929
+-0.71481 0.269035 0.322749 -0.559017
+-0.730875 0.221709 0.358619 -0.536711
+-0.743811 0.173432 0.392954 -0.512107
+-0.753561 0.124414 0.425606 -0.485311
+-0.760085 0.0748618 0.456435 -0.456435
+-0.763354 0.0249897 0.48531 -0.425606
+0.763354 0.0249896 -0.425606 0.485311
+0.760085 0.0748618 -0.456435 0.456435
+0.753561 0.124413 -0.485311 0.425606
+0.743811 0.173432 -0.512107 0.392954
+0.730875 0.221709 -0.536711 0.358619
+0.71481 0.269035 -0.559017 0.322749
+0.695684 0.31521 -0.578929 0.285496
+0.673578 0.360035 -0.596362 0.247021
+0.648589 0.403318 -0.611241 0.207488
+0.620822 0.444875 -0.623502 0.167067
+0.590396 0.484526 -0.633094 0.12593
+0.557443 0.522102 -0.639975 0.0842543
+0.522102 0.557443 -0.644115 0.0422175
+0.484526 0.590396 -0.645497 -1.44328e-09
+0.444875 0.620822 -0.644115 -0.0422176
+0.403318 0.648589 -0.639975 -0.0842544
+0.360035 0.673579 -0.633094 -0.12593
+0.31521 0.695684 -0.623502 -0.167067
+0.269035 0.71481 -0.611241 -0.207488
+0.221709 0.730875 -0.596362 -0.247021
+0.173432 0.743811 -0.578929 -0.285496
+0.124413 0.753561 -0.559017 -0.322749
+0.0748617 0.760085 -0.536711 -0.358619
+0.0249895 0.763354 -0.512107 -0.392954
+-0.0249897 0.763354 -0.48531 -0.425606
+-0.0748619 0.760085 -0.456435 -0.456436
+-0.124414 0.753561 -0.425606 -0.485311
+-0.173432 0.743811 -0.392954 -0.512107
+-0.221709 0.730875 -0.358619 -0.536711
+-0.269036 0.71481 -0.322749 -0.559017
+-0.31521 0.695684 -0.285496 -0.578929
+-0.360035 0.673579 -0.247021 -0.596362
+-0.403318 0.648589 -0.207488 -0.611241
+-0.444875 0.620822 -0.167067 -0.623502
+-0.484526 0.590397 -0.12593 -0.633094
+-0.522102 0.557443 -0.0842542 -0.639975
+-0.557443 0.522102 -0.0422176 -0.644115
+-0.590397 0.484526 2.96589e-08 -0.645497
+-0.620822 0.444875 0.0422175 -0.644115
+-0.648589 0.403318 0.0842543 -0.639975
+-0.673578 0.360035 0.12593 -0.633094
+-0.695684 0.31521 0.167067 -0.623502
+-0.71481 0.269035 0.207488 -0.611241
+-0.730875 0.221709 0.247021 -0.596362
+-0.743811 0.173432 0.285496 -0.578929
+-0.753561 0.124414 0.322749 -0.559017
+-0.760085 0.0748618 0.358619 -0.536711
+-0.763354 0.0249897 0.392954 -0.512107
+0.790146 0.0258667 -0.446949 0.418613
+0.786763 0.0774894 -0.47337 0.388485
+0.78001 0.12878 -0.497765 0.356693
+0.769917 0.17952 -0.520028 0.323374
+0.756528 0.22949 -0.540064 0.28867
+0.739899 0.278478 -0.557788 0.25273
+0.720101 0.326274 -0.573123 0.215708
+0.69722 0.372672 -0.586004 0.177762
+0.671353 0.417474 -0.596375 0.139055
+0.642612 0.460489 -0.604193 0.0997527
+0.611118 0.501532 -0.609424 0.060023
+0.577008 0.540427 -0.612045 0.0200362
+0.540427 0.577008 -0.612045 -0.0200363
+0.501532 0.611118 -0.609424 -0.060023
+0.460489 0.642612 -0.604193 -0.0997527
+0.417474 0.671353 -0.596375 -0.139055
+0.372672 0.69722 -0.586004 -0.177762
+0.326274 0.720101 -0.573123 -0.215708
+0.278478 0.739899 -0.557788 -0.25273
+0.22949 0.756528 -0.540064 -0.28867
+0.17952 0.769917 -0.520028 -0.323374
+0.12878 0.78001 -0.497765 -0.356693
+0.0774893 0.786763 -0.47337 -0.388485
+0.0258666 0.790146 -0.446949 -0.418613
+-0.0258668 0.790146 -0.418613 -0.446949
+-0.0774894 0.786763 -0.388485 -0.47337
+-0.12878 0.78001 -0.356693 -0.497765
+-0.17952 0.769917 -0.323374 -0.520028
+-0.22949 0.756528 -0.28867 -0.540064
+-0.278478 0.739899 -0.25273 -0.557788
+-0.326274 0.720101 -0.215708 -0.573123
+-0.372672 0.69722 -0.177762 -0.586004
+-0.417474 0.671353 -0.139055 -0.596375
+-0.460489 0.642612 -0.0997526 -0.604193
+-0.501532 0.611118 -0.060023 -0.609424
+-0.540427 0.577008 -0.0200362 -0.612045
+-0.577008 0.540427 0.0200362 -0.612045
+-0.611118 0.501532 0.060023 -0.609424
+-0.642612 0.460489 0.0997526 -0.604193
+-0.671353 0.417474 0.139055 -0.596375
+-0.69722 0.372672 0.177762 -0.586004
+-0.720101 0.326274 0.215708 -0.573123
+-0.739899 0.278478 0.25273 -0.557788
+-0.756528 0.22949 0.28867 -0.540064
+-0.769917 0.179519 0.323374 -0.520028
+-0.78001 0.12878 0.356693 -0.497765
+-0.786763 0.0774893 0.388485 -0.47337
+-0.790146 0.0258668 0.418613 -0.446949
+0.790146 0.0258667 -0.520028 0.323374
+0.786763 0.0774894 -0.540064 0.28867
+0.78001 0.12878 -0.557788 0.25273
+0.769917 0.17952 -0.573123 0.215708
+0.756528 0.22949 -0.586004 0.177762
+0.739899 0.278478 -0.596375 0.139055
+0.720101 0.326274 -0.604193 0.0997527
+0.69722 0.372672 -0.609424 0.060023
+0.671353 0.417474 -0.612045 0.0200363
+0.642612 0.460489 -0.612045 -0.0200363
+0.611118 0.501532 -0.609424 -0.060023
+0.577008 0.540427 -0.604193 -0.0997527
+0.540427 0.577008 -0.596375 -0.139055
+0.501532 0.611118 -0.586004 -0.177762
+0.460489 0.642612 -0.573123 -0.215708
+0.417474 0.671353 -0.557788 -0.25273
+0.372672 0.69722 -0.540064 -0.28867
+0.326274 0.720101 -0.520028 -0.323374
+0.278478 0.739899 -0.497765 -0.356693
+0.22949 0.756528 -0.47337 -0.388485
+0.17952 0.769917 -0.446949 -0.418613
+0.12878 0.78001 -0.418613 -0.446949
+0.0774893 0.786763 -0.388485 -0.47337
+0.0258666 0.790146 -0.356693 -0.497765
+-0.0258668 0.790146 -0.323374 -0.520028
+-0.0774894 0.786763 -0.28867 -0.540064
+-0.12878 0.78001 -0.25273 -0.557788
+-0.17952 0.769917 -0.215708 -0.573123
+-0.22949 0.756528 -0.177762 -0.586004
+-0.278478 0.739899 -0.139055 -0.596375
+-0.326274 0.720101 -0.0997526 -0.604193
+-0.372672 0.69722 -0.0600231 -0.609424
+-0.417474 0.671353 -0.0200363 -0.612045
+-0.460489 0.642612 0.0200363 -0.612045
+-0.501532 0.611118 0.060023 -0.609424
+-0.540427 0.577008 0.0997527 -0.604193
+-0.577008 0.540427 0.139055 -0.596375
+-0.611118 0.501532 0.177762 -0.586004
+-0.642612 0.460489 0.215708 -0.573123
+-0.671353 0.417474 0.25273 -0.557788
+-0.69722 0.372672 0.28867 -0.540064
+-0.720101 0.326274 0.323374 -0.520028
+-0.739899 0.278478 0.356693 -0.497765
+-0.756528 0.22949 0.388485 -0.47337
+-0.769917 0.179519 0.418613 -0.446949
+-0.78001 0.12878 0.446949 -0.418613
+-0.786763 0.0774893 0.47337 -0.388485
+-0.790146 0.0258668 0.497765 -0.356693
+0.816059 0.026715 -0.51781 0.255355
+0.812565 0.0800307 -0.533402 0.220942
+0.805591 0.133004 -0.54671 0.185583
+0.795167 0.185407 -0.557678 0.149429
+0.781339 0.237016 -0.566257 0.112635
+0.764164 0.287611 -0.572411 0.0753593
+0.743717 0.336974 -0.576114 0.0377605
+0.720086 0.384894 -0.57735 -1.21881e-08
+0.693371 0.431166 -0.576114 -0.0377605
+0.663687 0.475591 -0.572411 -0.0753593
+0.63116 0.51798 -0.566257 -0.112635
+0.595932 0.558151 -0.557678 -0.149429
+0.558151 0.595932 -0.54671 -0.185583
+0.51798 0.63116 -0.533402 -0.220942
+0.475591 0.663687 -0.51781 -0.255356
+0.431166 0.693371 -0.5 -0.288675
+0.384894 0.720086 -0.480049 -0.320759
+0.336974 0.743717 -0.458043 -0.351469
+0.287611 0.764164 -0.434075 -0.380674
+0.237016 0.781339 -0.408248 -0.408248
+0.185407 0.795167 -0.380673 -0.434075
+0.133003 0.805591 -0.351469 -0.458043
+0.0800306 0.812565 -0.320759 -0.480049
+0.0267149 0.816059 -0.288675 -0.5
+-0.0267151 0.816059 -0.255355 -0.51781
+-0.0800307 0.812565 -0.220942 -0.533402
+-0.133004 0.805591 -0.185583 -0.54671
+-0.185407 0.795167 -0.149429 -0.557678
+-0.237017 0.781338 -0.112635 -0.566257
+-0.287611 0.764164 -0.0753592 -0.572411
+-0.336974 0.743717 -0.0377604 -0.576114
+-0.384894 0.720086 -6.58134e-08 -0.57735
+-0.431166 0.693371 0.0377605 -0.576114
+-0.475591 0.663686 0.0753594 -0.572411
+-0.51798 0.63116 0.112635 -0.566257
+-0.558151 0.595931 0.149429 -0.557678
+-0.595931 0.558151 0.185583 -0.54671
+-0.63116 0.51798 0.220942 -0.533402
+-0.663686 0.475591 0.255355 -0.51781
+-0.693371 0.431166 0.288675 -0.5
+-0.720086 0.384894 0.320759 -0.480049
+-0.743717 0.336974 0.351469 -0.458043
+-0.764164 0.287611 0.380674 -0.434075
+-0.781339 0.237016 0.408248 -0.408248
+-0.795167 0.185407 0.434075 -0.380673
+-0.805591 0.133004 0.458043 -0.351469
+-0.812565 0.0800306 0.480049 -0.320759
+-0.816059 0.0267151 0.5 -0.288675
+0.816059 0.026715 -0.458043 0.351469
+0.812565 0.0800307 -0.480049 0.320759
+0.805591 0.133004 -0.5 0.288675
+0.795167 0.185407 -0.51781 0.255355
+0.781339 0.237016 -0.533402 0.220942
+0.764164 0.287611 -0.54671 0.185583
+0.743717 0.336974 -0.557678 0.149429
+0.720086 0.384894 -0.566257 0.112635
+0.693371 0.431166 -0.572411 0.0753593
+0.663687 0.475591 -0.576114 0.0377605
+0.63116 0.51798 -0.57735 1.04669e-08
+0.595932 0.558151 -0.576114 -0.0377605
+0.558151 0.595932 -0.572411 -0.0753593
+0.51798 0.63116 -0.566257 -0.112635
+0.475591 0.663687 -0.557678 -0.149429
+0.431166 0.693371 -0.54671 -0.185583
+0.384894 0.720086 -0.533402 -0.220942
+0.336974 0.743717 -0.51781 -0.255356
+0.287611 0.764164 -0.5 -0.288675
+0.237016 0.781339 -0.480049 -0.320759
+0.185407 0.795167 -0.458043 -0.351469
+0.133003 0.805591 -0.434075 -0.380674
+0.0800306 0.812565 -0.408248 -0.408248
+0.0267149 0.816059 -0.380673 -0.434075
+-0.0267151 0.816059 -0.351469 -0.458043
+-0.0800307 0.812565 -0.320759 -0.480049
+-0.133004 0.805591 -0.288675 -0.5
+-0.185407 0.795167 -0.255355 -0.51781
+-0.237017 0.781338 -0.220942 -0.533402
+-0.287611 0.764164 -0.185583 -0.54671
+-0.336974 0.743717 -0.149429 -0.557678
+-0.384894 0.720086 -0.112636 -0.566257
+-0.431166 0.693371 -0.0753593 -0.572411
+-0.475591 0.663686 -0.0377605 -0.576114
+-0.51798 0.63116 -1.96429e-08 -0.57735
+-0.558151 0.595931 0.0377606 -0.576114
+-0.595931 0.558151 0.0753593 -0.572411
+-0.63116 0.51798 0.112635 -0.566257
+-0.663686 0.475591 0.149429 -0.557678
+-0.693371 0.431166 0.185583 -0.54671
+-0.720086 0.384894 0.220942 -0.533402
+-0.743717 0.336974 0.255355 -0.51781
+-0.764164 0.287611 0.288675 -0.5
+-0.781339 0.237016 0.320759 -0.480049
+-0.795167 0.185407 0.351469 -0.458043
+-0.805591 0.133004 0.380673 -0.434075
+-0.812565 0.0800306 0.408248 -0.408248
+-0.816059 0.0267151 0.434075 -0.380674
+0.841175 0.0275372 -0.458622 0.285189
+0.837573 0.0824937 -0.476292 0.254583
+0.830384 0.137097 -0.491923 0.222887
+0.81964 0.191113 -0.505447 0.190237
+0.805385 0.244311 -0.516807 0.156772
+0.787682 0.296463 -0.525954 0.122635
+0.766606 0.347345 -0.532848 0.0879736
+0.742247 0.396739 -0.537461 0.0529353
+0.71471 0.444435 -0.539773 0.0176703
+0.684112 0.490228 -0.539773 -0.0176703
+0.650585 0.533921 -0.537461 -0.0529353
+0.614272 0.575329 -0.532848 -0.0879736
+0.575329 0.614272 -0.525954 -0.122635
+0.533922 0.650585 -0.516807 -0.156772
+0.490228 0.684112 -0.505447 -0.190237
+0.444435 0.71471 -0.491923 -0.222887
+0.396739 0.742247 -0.476292 -0.254583
+0.347345 0.766606 -0.458622 -0.285189
+0.296463 0.787682 -0.438987 -0.314574
+0.244311 0.805385 -0.417473 -0.342612
+0.191113 0.81964 -0.394172 -0.369182
+0.137097 0.830384 -0.369182 -0.394172
+0.0824936 0.837573 -0.342611 -0.417473
+0.0275371 0.841175 -0.314574 -0.438987
+-0.0275373 0.841175 -0.285189 -0.458622
+-0.0824938 0.837573 -0.254583 -0.476292
+-0.137097 0.830384 -0.222887 -0.491923
+-0.191113 0.81964 -0.190237 -0.505447
+-0.244311 0.805385 -0.156772 -0.516807
+-0.296463 0.787682 -0.122635 -0.525954
+-0.347345 0.766606 -0.0879735 -0.532848
+-0.396739 0.742247 -0.0529354 -0.537461
+-0.444435 0.71471 -0.0176703 -0.539773
+-0.490228 0.684112 0.0176704 -0.539773
+-0.533921 0.650585 0.0529353 -0.537461
+-0.575329 0.614272 0.0879736 -0.532848
+-0.614272 0.575329 0.122635 -0.525954
+-0.650585 0.533921 0.156772 -0.516807
+-0.684112 0.490228 0.190237 -0.505447
+-0.71471 0.444435 0.222887 -0.491923
+-0.742247 0.39674 0.254583 -0.476292
+-0.766606 0.347345 0.285189 -0.458622
+-0.787682 0.296463 0.314574 -0.438987
+-0.805385 0.244311 0.342612 -0.417473
+-0.81964 0.191113 0.369182 -0.394172
+-0.830384 0.137097 0.394172 -0.369182
+-0.837573 0.0824937 0.417473 -0.342612
+-0.841175 0.0275373 0.438987 -0.314574
+0.790146 0.0258667 -0.356693 0.497765
+0.786763 0.0774894 -0.388485 0.47337
+0.78001 0.12878 -0.418613 0.446949
+0.769917 0.17952 -0.446949 0.418613
+0.756528 0.22949 -0.47337 0.388485
+0.739899 0.278478 -0.497765 0.356693
+0.720101 0.326274 -0.520028 0.323374
+0.69722 0.372672 -0.540064 0.28867
+0.671353 0.417474 -0.557788 0.25273
+0.642612 0.460489 -0.573123 0.215708
+0.611118 0.501532 -0.586004 0.177762
+0.577008 0.540427 -0.596375 0.139055
+0.540427 0.577008 -0.604193 0.0997527
+0.501532 0.611118 -0.609424 0.060023
+0.460489 0.642612 -0.612045 0.0200362
+0.417474 0.671353 -0.612045 -0.0200363
+0.372672 0.69722 -0.609424 -0.060023
+0.326274 0.720101 -0.604193 -0.0997527
+0.278478 0.739899 -0.596375 -0.139055
+0.22949 0.756528 -0.586004 -0.177762
+0.17952 0.769917 -0.573123 -0.215708
+0.12878 0.78001 -0.557788 -0.25273
+0.0774893 0.786763 -0.540064 -0.28867
+0.0258666 0.790146 -0.520028 -0.323374
+-0.0258668 0.790146 -0.497765 -0.356693
+-0.0774894 0.786763 -0.47337 -0.388485
+-0.12878 0.78001 -0.446949 -0.418613
+-0.17952 0.769917 -0.418613 -0.446949
+-0.22949 0.756528 -0.388485 -0.47337
+-0.278478 0.739899 -0.356693 -0.497765
+-0.326274 0.720101 -0.323374 -0.520028
+-0.372672 0.69722 -0.28867 -0.540064
+-0.417474 0.671353 -0.25273 -0.557788
+-0.460489 0.642612 -0.215708 -0.573123
+-0.501532 0.611118 -0.177762 -0.586004
+-0.540427 0.577008 -0.139055 -0.596375
+-0.577008 0.540427 -0.0997527 -0.604193
+-0.611118 0.501532 -0.060023 -0.609424
+-0.642612 0.460489 -0.0200363 -0.612045
+-0.671353 0.417474 0.0200363 -0.612045
+-0.69722 0.372672 0.0600229 -0.609424
+-0.720101 0.326274 0.0997527 -0.604193
+-0.739899 0.278478 0.139055 -0.596375
+-0.756528 0.22949 0.177762 -0.586004
+-0.769917 0.179519 0.215708 -0.573123
+-0.78001 0.12878 0.25273 -0.557788
+-0.786763 0.0774893 0.28867 -0.540064
+-0.790146 0.0258668 0.323374 -0.520028
+0.816059 0.026715 -0.380673 0.434075
+0.812565 0.0800307 -0.408248 0.408248
+0.805591 0.133004 -0.434075 0.380673
+0.795167 0.185407 -0.458043 0.351469
+0.781339 0.237016 -0.480049 0.320759
+0.764164 0.287611 -0.5 0.288675
+0.743717 0.336974 -0.51781 0.255355
+0.720086 0.384894 -0.533402 0.220942
+0.693371 0.431166 -0.54671 0.185583
+0.663687 0.475591 -0.557678 0.149429
+0.63116 0.51798 -0.566257 0.112635
+0.595932 0.558151 -0.572411 0.0753593
+0.558151 0.595932 -0.576114 0.0377605
+0.51798 0.63116 -0.57735 -1.29091e-09
+0.475591 0.663687 -0.576114 -0.0377605
+0.431166 0.693371 -0.572411 -0.0753594
+0.384894 0.720086 -0.566257 -0.112635
+0.336974 0.743717 -0.557678 -0.149429
+0.287611 0.764164 -0.54671 -0.185583
+0.237016 0.781339 -0.533402 -0.220942
+0.185407 0.795167 -0.51781 -0.255356
+0.133003 0.805591 -0.5 -0.288675
+0.0800306 0.812565 -0.480049 -0.320759
+0.0267149 0.816059 -0.458043 -0.351469
+-0.0267151 0.816059 -0.434075 -0.380674
+-0.0800307 0.812565 -0.408248 -0.408248
+-0.133004 0.805591 -0.380673 -0.434075
+-0.185407 0.795167 -0.351469 -0.458043
+-0.237017 0.781338 -0.320759 -0.480049
+-0.287611 0.764164 -0.288675 -0.5
+-0.336974 0.743717 -0.255355 -0.51781
+-0.384894 0.720086 -0.220942 -0.533402
+-0.431166 0.693371 -0.185583 -0.54671
+-0.475591 0.663686 -0.149429 -0.557678
+-0.51798 0.63116 -0.112635 -0.566257
+-0.558151 0.595931 -0.0753593 -0.572411
+-0.595931 0.558151 -0.0377605 -0.576114
+-0.63116 0.51798 2.65277e-08 -0.57735
+-0.663686 0.475591 0.0377605 -0.576114
+-0.693371 0.431166 0.0753593 -0.572411
+-0.720086 0.384894 0.112635 -0.566257
+-0.743717 0.336974 0.149429 -0.557678
+-0.764164 0.287611 0.185583 -0.54671
+-0.781339 0.237016 0.220942 -0.533402
+-0.795167 0.185407 0.255356 -0.51781
+-0.805591 0.133004 0.288675 -0.5
+-0.812565 0.0800306 0.320759 -0.480049
+-0.816059 0.0267151 0.351469 -0.458043
+0.816059 0.026715 -0.288675 0.5
+0.812565 0.0800307 -0.320759 0.480049
+0.805591 0.133004 -0.351469 0.458043
+0.795167 0.185407 -0.380673 0.434075
+0.781339 0.237016 -0.408248 0.408248
+0.764164 0.287611 -0.434075 0.380673
+0.743717 0.336974 -0.458043 0.351469
+0.720086 0.384894 -0.480049 0.320759
+0.693371 0.431166 -0.5 0.288675
+0.663687 0.475591 -0.51781 0.255355
+0.63116 0.51798 -0.533402 0.220942
+0.595932 0.558151 -0.54671 0.185583
+0.558151 0.595932 -0.557678 0.149429
+0.51798 0.63116 -0.566257 0.112635
+0.475591 0.663687 -0.572411 0.0753593
+0.431166 0.693371 -0.576114 0.0377605
+0.384894 0.720086 -0.57735 -4.74615e-08
+0.336974 0.743717 -0.576114 -0.0377606
+0.287611 0.764164 -0.572411 -0.0753594
+0.237016 0.781339 -0.566257 -0.112635
+0.185407 0.795167 -0.557678 -0.149429
+0.133003 0.805591 -0.54671 -0.185583
+0.0800306 0.812565 -0.533402 -0.220942
+0.0267149 0.816059 -0.51781 -0.255356
+-0.0267151 0.816059 -0.5 -0.288675
+-0.0800307 0.812565 -0.480049 -0.320759
+-0.133004 0.805591 -0.458043 -0.351469
+-0.185407 0.795167 -0.434075 -0.380674
+-0.237017 0.781338 -0.408248 -0.408248
+-0.287611 0.764164 -0.380673 -0.434075
+-0.336974 0.743717 -0.351469 -0.458043
+-0.384894 0.720086 -0.320759 -0.480049
+-0.431166 0.693371 -0.288675 -0.5
+-0.475591 0.663686 -0.255355 -0.51781
+-0.51798 0.63116 -0.220942 -0.533402
+-0.558151 0.595931 -0.185583 -0.54671
+-0.595931 0.558151 -0.149429 -0.557678
+-0.63116 0.51798 -0.112635 -0.566257
+-0.663686 0.475591 -0.0753594 -0.572411
+-0.693371 0.431166 -0.0377605 -0.576114
+-0.720086 0.384894 -6.49528e-08 -0.57735
+-0.743717 0.336974 0.0377605 -0.576114
+-0.764164 0.287611 0.0753594 -0.572411
+-0.781339 0.237016 0.112635 -0.566257
+-0.795167 0.185407 0.149429 -0.557678
+-0.805591 0.133004 0.185583 -0.54671
+-0.812565 0.0800306 0.220942 -0.533402
+-0.816059 0.0267151 0.255355 -0.51781
+0.841175 0.0275372 -0.314574 0.438987
+0.837573 0.0824937 -0.342612 0.417473
+0.830384 0.137097 -0.369182 0.394172
+0.81964 0.191113 -0.394172 0.369182
+0.805385 0.244311 -0.417473 0.342612
+0.787682 0.296463 -0.438987 0.314574
+0.766606 0.347345 -0.458622 0.285189
+0.742247 0.396739 -0.476292 0.254583
+0.71471 0.444435 -0.491923 0.222887
+0.684112 0.490228 -0.505447 0.190237
+0.650585 0.533921 -0.516807 0.156772
+0.614272 0.575329 -0.525954 0.122635
+0.575329 0.614272 -0.532848 0.0879736
+0.533922 0.650585 -0.537461 0.0529353
+0.490228 0.684112 -0.539773 0.0176703
+0.444435 0.71471 -0.539773 -0.0176704
+0.396739 0.742247 -0.537461 -0.0529354
+0.347345 0.766606 -0.532848 -0.0879736
+0.296463 0.787682 -0.525954 -0.122635
+0.244311 0.805385 -0.516807 -0.156772
+0.191113 0.81964 -0.505447 -0.190237
+0.137097 0.830384 -0.491923 -0.222887
+0.0824936 0.837573 -0.476292 -0.254583
+0.0275371 0.841175 -0.458622 -0.285189
+-0.0275373 0.841175 -0.438987 -0.314574
+-0.0824938 0.837573 -0.417473 -0.342612
+-0.137097 0.830384 -0.394172 -0.369182
+-0.191113 0.81964 -0.369182 -0.394172
+-0.244311 0.805385 -0.342611 -0.417473
+-0.296463 0.787682 -0.314574 -0.438987
+-0.347345 0.766606 -0.285189 -0.458622
+-0.396739 0.742247 -0.254583 -0.476292
+-0.444435 0.71471 -0.222887 -0.491923
+-0.490228 0.684112 -0.190237 -0.505447
+-0.533921 0.650585 -0.156772 -0.516807
+-0.575329 0.614272 -0.122635 -0.525954
+-0.614272 0.575329 -0.0879736 -0.532848
+-0.650585 0.533921 -0.0529353 -0.537461
+-0.684112 0.490228 -0.0176704 -0.539773
+-0.71471 0.444435 0.0176703 -0.539773
+-0.742247 0.39674 0.0529352 -0.537461
+-0.766606 0.347345 0.0879736 -0.532848
+-0.787682 0.296463 0.122635 -0.525954
+-0.805385 0.244311 0.156772 -0.516807
+-0.81964 0.191113 0.190237 -0.505447
+-0.830384 0.137097 0.222887 -0.491923
+-0.837573 0.0824937 0.254583 -0.476292
+-0.841175 0.0275373 0.285189 -0.458622
+0.841175 0.0275372 -0.394172 0.369182
+0.837573 0.0824937 -0.417473 0.342612
+0.830384 0.137097 -0.438987 0.314574
+0.81964 0.191113 -0.458622 0.285189
+0.805385 0.244311 -0.476292 0.254583
+0.787682 0.296463 -0.491923 0.222887
+0.766606 0.347345 -0.505447 0.190237
+0.742247 0.396739 -0.516807 0.156772
+0.71471 0.444435 -0.525954 0.122635
+0.684112 0.490228 -0.532848 0.0879736
+0.650585 0.533921 -0.537461 0.0529353
+0.614272 0.575329 -0.539773 0.0176703
+0.575329 0.614272 -0.539773 -0.0176703
+0.533922 0.650585 -0.537461 -0.0529353
+0.490228 0.684112 -0.532848 -0.0879736
+0.444435 0.71471 -0.525954 -0.122635
+0.396739 0.742247 -0.516807 -0.156772
+0.347345 0.766606 -0.505447 -0.190237
+0.296463 0.787682 -0.491923 -0.222887
+0.244311 0.805385 -0.476292 -0.254583
+0.191113 0.81964 -0.458622 -0.285189
+0.137097 0.830384 -0.438987 -0.314574
+0.0824936 0.837573 -0.417473 -0.342612
+0.0275371 0.841175 -0.394172 -0.369182
+-0.0275373 0.841175 -0.369182 -0.394172
+-0.0824938 0.837573 -0.342611 -0.417473
+-0.137097 0.830384 -0.314574 -0.438987
+-0.191113 0.81964 -0.285189 -0.458622
+-0.244311 0.805385 -0.254583 -0.476292
+-0.296463 0.787682 -0.222887 -0.491923
+-0.347345 0.766606 -0.190237 -0.505447
+-0.396739 0.742247 -0.156772 -0.516807
+-0.444435 0.71471 -0.122635 -0.525954
+-0.490228 0.684112 -0.0879736 -0.532848
+-0.533921 0.650585 -0.0529353 -0.537461
+-0.575329 0.614272 -0.0176703 -0.539773
+-0.614272 0.575329 0.0176703 -0.539773
+-0.650585 0.533921 0.0529353 -0.537461
+-0.684112 0.490228 0.0879736 -0.532848
+-0.71471 0.444435 0.122635 -0.525954
+-0.742247 0.39674 0.156772 -0.516807
+-0.766606 0.347345 0.190237 -0.505447
+-0.787682 0.296463 0.222887 -0.491923
+-0.805385 0.244311 0.254583 -0.476292
+-0.81964 0.191113 0.285189 -0.458622
+-0.830384 0.137097 0.314574 -0.438987
+-0.837573 0.0824937 0.342612 -0.417473
+-0.841175 0.0275373 0.369182 -0.394172
+0.865562 0.0283356 -0.396677 0.304381
+0.861855 0.0848853 -0.415735 0.277785
+0.854458 0.141072 -0.433013 0.25
+0.843402 0.196654 -0.448436 0.221144
+0.828735 0.251394 -0.46194 0.191342
+0.810518 0.305057 -0.473465 0.16072
+0.788831 0.357415 -0.482963 0.12941
+0.763766 0.408242 -0.490393 0.0975452
+0.735431 0.45732 -0.495722 0.0652631
+0.703946 0.50444 -0.498929 0.0327016
+0.669447 0.549401 -0.5 9.06458e-09
+0.632081 0.592008 -0.498929 -0.0327016
+0.592008 0.632081 -0.495722 -0.0652631
+0.549401 0.669447 -0.490393 -0.0975452
+0.50444 0.703946 -0.482963 -0.12941
+0.45732 0.735431 -0.473465 -0.16072
+0.408241 0.763766 -0.46194 -0.191342
+0.357415 0.788831 -0.448436 -0.221144
+0.305057 0.810518 -0.433013 -0.25
+0.251394 0.828735 -0.415735 -0.277785
+0.196654 0.843402 -0.396677 -0.304381
+0.141072 0.854458 -0.37592 -0.329673
+0.0848852 0.861855 -0.353553 -0.353553
+0.0283355 0.865562 -0.329673 -0.37592
+-0.0283356 0.865562 -0.304381 -0.396677
+-0.0848854 0.861855 -0.277785 -0.415735
+-0.141072 0.854458 -0.25 -0.433013
+-0.196654 0.843402 -0.221144 -0.448436
+-0.251394 0.828735 -0.191342 -0.46194
+-0.305058 0.810518 -0.16072 -0.473465
+-0.357415 0.788831 -0.129409 -0.482963
+-0.408241 0.763766 -0.0975452 -0.490393
+-0.45732 0.735431 -0.0652631 -0.495722
+-0.504441 0.703946 -0.0327015 -0.498929
+-0.549401 0.669447 -1.70112e-08 -0.5
+-0.592008 0.632081 0.0327016 -0.498929
+-0.632081 0.592008 0.0652631 -0.495722
+-0.669447 0.549401 0.0975452 -0.490393
+-0.703946 0.504441 0.129409 -0.482963
+-0.735431 0.45732 0.16072 -0.473465
+-0.763766 0.408242 0.191342 -0.46194
+-0.788831 0.357415 0.221144 -0.448436
+-0.810518 0.305057 0.25 -0.433013
+-0.828735 0.251394 0.277785 -0.415735
+-0.843402 0.196654 0.304381 -0.396677
+-0.854458 0.141072 0.329673 -0.37592
+-0.861855 0.0848853 0.353553 -0.353553
+-0.865562 0.0283356 0.37592 -0.329673
+0.865562 0.0283356 -0.329673 0.37592
+0.861855 0.0848853 -0.353553 0.353553
+0.854458 0.141072 -0.37592 0.329673
+0.843402 0.196654 -0.396677 0.304381
+0.828735 0.251394 -0.415735 0.277785
+0.810518 0.305057 -0.433013 0.25
+0.788831 0.357415 -0.448436 0.221144
+0.763766 0.408242 -0.46194 0.191342
+0.735431 0.45732 -0.473465 0.16072
+0.703946 0.50444 -0.482963 0.12941
+0.669447 0.549401 -0.490393 0.0975452
+0.632081 0.592008 -0.495722 0.0652631
+0.592008 0.632081 -0.498929 0.0327016
+0.549401 0.669447 -0.5 -1.11796e-09
+0.50444 0.703946 -0.498929 -0.0327016
+0.45732 0.735431 -0.495722 -0.0652631
+0.408241 0.763766 -0.490393 -0.0975452
+0.357415 0.788831 -0.482963 -0.12941
+0.305057 0.810518 -0.473465 -0.16072
+0.251394 0.828735 -0.46194 -0.191342
+0.196654 0.843402 -0.448436 -0.221144
+0.141072 0.854458 -0.433013 -0.25
+0.0848852 0.861855 -0.415735 -0.277785
+0.0283355 0.865562 -0.396677 -0.304381
+-0.0283356 0.865562 -0.37592 -0.329673
+-0.0848854 0.861855 -0.353553 -0.353553
+-0.141072 0.854458 -0.329673 -0.37592
+-0.196654 0.843402 -0.304381 -0.396677
+-0.251394 0.828735 -0.277785 -0.415735
+-0.305058 0.810518 -0.25 -0.433013
+-0.357415 0.788831 -0.221144 -0.448436
+-0.408241 0.763766 -0.191342 -0.46194
+-0.45732 0.735431 -0.16072 -0.473465
+-0.504441 0.703946 -0.129409 -0.482963
+-0.549401 0.669447 -0.0975452 -0.490393
+-0.592008 0.632081 -0.0652631 -0.495722
+-0.632081 0.592008 -0.0327016 -0.498929
+-0.669447 0.549401 2.29737e-08 -0.5
+-0.703946 0.504441 0.0327015 -0.498929
+-0.735431 0.45732 0.0652631 -0.495722
+-0.763766 0.408242 0.0975451 -0.490393
+-0.788831 0.357415 0.12941 -0.482963
+-0.810518 0.305057 0.16072 -0.473465
+-0.828735 0.251394 0.191342 -0.46194
+-0.843402 0.196654 0.221144 -0.448436
+-0.854458 0.141072 0.25 -0.433013
+-0.861855 0.0848853 0.277785 -0.415735
+-0.865562 0.0283356 0.304381 -0.396677
+0.88928 0.029112 -0.333136 0.312016
+0.885472 0.0872114 -0.352829 0.28956
+0.877872 0.144937 -0.371012 0.265863
+0.866513 0.202043 -0.387606 0.241029
+0.851444 0.258283 -0.40254 0.215162
+0.832728 0.313417 -0.415751 0.188374
+0.810447 0.367209 -0.427181 0.160779
+0.784695 0.419428 -0.436782 0.132496
+0.755583 0.469852 -0.444512 0.103646
+0.723236 0.518263 -0.450339 0.0743513
+0.687791 0.564456 -0.454238 0.0447385
+0.649401 0.608231 -0.456191 0.0149341
+0.608231 0.649401 -0.456191 -0.0149342
+0.564456 0.687791 -0.454238 -0.0447385
+0.518263 0.723236 -0.450339 -0.0743513
+0.469852 0.755583 -0.444512 -0.103646
+0.419428 0.784695 -0.436781 -0.132496
+0.367209 0.810447 -0.427181 -0.160779
+0.313417 0.832728 -0.415751 -0.188374
+0.258283 0.851444 -0.40254 -0.215162
+0.202043 0.866513 -0.387606 -0.241029
+0.144937 0.877872 -0.371012 -0.265864
+0.0872113 0.885472 -0.352829 -0.28956
+0.0291119 0.88928 -0.333136 -0.312016
+-0.0291121 0.88928 -0.312016 -0.333136
+-0.0872115 0.885472 -0.28956 -0.352829
+-0.144937 0.877872 -0.265863 -0.371012
+-0.202043 0.866513 -0.241029 -0.387606
+-0.258283 0.851444 -0.215162 -0.40254
+-0.313417 0.832728 -0.188374 -0.415751
+-0.367209 0.810447 -0.160779 -0.427181
+-0.419428 0.784695 -0.132496 -0.436781
+-0.469852 0.755583 -0.103646 -0.444512
+-0.518263 0.723236 -0.0743512 -0.450339
+-0.564456 0.687791 -0.0447385 -0.454238
+-0.608231 0.649401 -0.0149341 -0.456191
+-0.649401 0.608231 0.0149341 -0.456191
+-0.687791 0.564456 0.0447385 -0.454238
+-0.723236 0.518263 0.0743512 -0.450339
+-0.755583 0.469852 0.103646 -0.444512
+-0.784695 0.419428 0.132496 -0.436782
+-0.810447 0.367209 0.160779 -0.427181
+-0.832728 0.313417 0.188374 -0.415751
+-0.851444 0.258283 0.215162 -0.40254
+-0.866513 0.202043 0.241029 -0.387606
+-0.877872 0.144937 0.265863 -0.371012
+-0.885472 0.0872113 0.28956 -0.352829
+-0.88928 0.0291121 0.312016 -0.333136
+0.841175 0.0275372 -0.505447 0.190237
+0.837573 0.0824937 -0.516807 0.156772
+0.830384 0.137097 -0.525954 0.122635
+0.81964 0.191113 -0.532848 0.0879736
+0.805385 0.244311 -0.537461 0.0529353
+0.787682 0.296463 -0.539773 0.0176703
+0.766606 0.347345 -0.539773 -0.0176703
+0.742247 0.396739 -0.537461 -0.0529353
+0.71471 0.444435 -0.532848 -0.0879736
+0.684112 0.490228 -0.525954 -0.122635
+0.650585 0.533921 -0.516807 -0.156772
+0.614272 0.575329 -0.505447 -0.190237
+0.575329 0.614272 -0.491923 -0.222887
+0.533922 0.650585 -0.476292 -0.254583
+0.490228 0.684112 -0.458622 -0.285189
+0.444435 0.71471 -0.438987 -0.314574
+0.396739 0.742247 -0.417473 -0.342612
+0.347345 0.766606 -0.394172 -0.369182
+0.296463 0.787682 -0.369182 -0.394172
+0.244311 0.805385 -0.342612 -0.417473
+0.191113 0.81964 -0.314574 -0.438987
+0.137097 0.830384 -0.285189 -0.458622
+0.0824936 0.837573 -0.254583 -0.476292
+0.0275371 0.841175 -0.222887 -0.491923
+-0.0275373 0.841175 -0.190237 -0.505447
+-0.0824938 0.837573 -0.156772 -0.516807
+-0.137097 0.830384 -0.122635 -0.525954
+-0.191113 0.81964 -0.0879736 -0.532848
+-0.244311 0.805385 -0.0529352 -0.537461
+-0.296463 0.787682 -0.0176702 -0.539773
+-0.347345 0.766606 0.0176704 -0.539773
+-0.396739 0.742247 0.0529352 -0.537461
+-0.444435 0.71471 0.0879736 -0.532848
+-0.490228 0.684112 0.122635 -0.525954
+-0.533921 0.650585 0.156772 -0.516807
+-0.575329 0.614272 0.190237 -0.505447
+-0.614272 0.575329 0.222887 -0.491923
+-0.650585 0.533921 0.254583 -0.476292
+-0.684112 0.490228 0.285189 -0.458622
+-0.71471 0.444435 0.314574 -0.438987
+-0.742247 0.39674 0.342611 -0.417473
+-0.766606 0.347345 0.369182 -0.394172
+-0.787682 0.296463 0.394172 -0.369182
+-0.805385 0.244311 0.417473 -0.342612
+-0.81964 0.191113 0.438987 -0.314574
+-0.830384 0.137097 0.458622 -0.285189
+-0.837573 0.0824937 0.476292 -0.254583
+-0.841175 0.0275373 0.491923 -0.222887
+0.865562 0.0283356 -0.482963 0.12941
+0.861855 0.0848853 -0.490393 0.0975452
+0.854458 0.141072 -0.495722 0.0652631
+0.843402 0.196654 -0.498929 0.0327016
+0.828735 0.251394 -0.5 -3.72653e-10
+0.810518 0.305057 -0.498929 -0.0327016
+0.788831 0.357415 -0.495722 -0.0652631
+0.763766 0.408242 -0.490393 -0.0975452
+0.735431 0.45732 -0.482963 -0.12941
+0.703946 0.50444 -0.473465 -0.16072
+0.669447 0.549401 -0.46194 -0.191342
+0.632081 0.592008 -0.448436 -0.221144
+0.592008 0.632081 -0.433013 -0.25
+0.549401 0.669447 -0.415735 -0.277785
+0.50444 0.703946 -0.396677 -0.304381
+0.45732 0.735431 -0.37592 -0.329673
+0.408241 0.763766 -0.353553 -0.353553
+0.357415 0.788831 -0.329673 -0.37592
+0.305057 0.810518 -0.304381 -0.396677
+0.251394 0.828735 -0.277785 -0.415735
+0.196654 0.843402 -0.25 -0.433013
+0.141072 0.854458 -0.221144 -0.448436
+0.0848852 0.861855 -0.191342 -0.46194
+0.0283355 0.865562 -0.16072 -0.473465
+-0.0283356 0.865562 -0.129409 -0.482963
+-0.0848854 0.861855 -0.0975451 -0.490393
+-0.141072 0.854458 -0.0652631 -0.495722
+-0.196654 0.843402 -0.0327015 -0.498929
+-0.251394 0.828735 8.1833e-08 -0.5
+-0.305058 0.810518 0.0327016 -0.498929
+-0.357415 0.788831 0.0652632 -0.495722
+-0.408241 0.763766 0.0975451 -0.490393
+-0.45732 0.735431 0.12941 -0.482963
+-0.504441 0.703946 0.16072 -0.473465
+-0.549401 0.669447 0.191342 -0.46194
+-0.592008 0.632081 0.221144 -0.448436
+-0.632081 0.592008 0.25 -0.433013
+-0.669447 0.549401 0.277785 -0.415735
+-0.703946 0.504441 0.304381 -0.396677
+-0.735431 0.45732 0.329673 -0.37592
+-0.763766 0.408242 0.353553 -0.353553
+-0.788831 0.357415 0.37592 -0.329673
+-0.810518 0.305057 0.396677 -0.304381
+-0.828735 0.251394 0.415735 -0.277785
+-0.843402 0.196654 0.433013 -0.25
+-0.854458 0.141072 0.448436 -0.221144
+-0.861855 0.0848853 0.46194 -0.191342
+-0.865562 0.0283356 0.473465 -0.16072
+0.865562 0.0283356 -0.448436 0.221144
+0.861855 0.0848853 -0.46194 0.191342
+0.854458 0.141072 -0.473465 0.16072
+0.843402 0.196654 -0.482963 0.12941
+0.828735 0.251394 -0.490393 0.0975452
+0.810518 0.305057 -0.495722 0.0652631
+0.788831 0.357415 -0.498929 0.0327016
+0.763766 0.408242 -0.5 -1.05552e-08
+0.735431 0.45732 -0.498929 -0.0327016
+0.703946 0.50444 -0.495722 -0.0652631
+0.669447 0.549401 -0.490393 -0.0975452
+0.632081 0.592008 -0.482963 -0.12941
+0.592008 0.632081 -0.473465 -0.16072
+0.549401 0.669447 -0.46194 -0.191342
+0.50444 0.703946 -0.448436 -0.221144
+0.45732 0.735431 -0.433013 -0.25
+0.408241 0.763766 -0.415735 -0.277785
+0.357415 0.788831 -0.396677 -0.304381
+0.305057 0.810518 -0.37592 -0.329673
+0.251394 0.828735 -0.353553 -0.353553
+0.196654 0.843402 -0.329673 -0.37592
+0.141072 0.854458 -0.304381 -0.396677
+0.0848852 0.861855 -0.277785 -0.415735
+0.0283355 0.865562 -0.25 -0.433013
+-0.0283356 0.865562 -0.221144 -0.448436
+-0.0848854 0.861855 -0.191342 -0.46194
+-0.141072 0.854458 -0.16072 -0.473465
+-0.196654 0.843402 -0.129409 -0.482963
+-0.251394 0.828735 -0.0975451 -0.490393
+-0.305058 0.810518 -0.065263 -0.495722
+-0.357415 0.788831 -0.0327015 -0.498929
+-0.408241 0.763766 -5.69961e-08 -0.5
+-0.45732 0.735431 0.0327016 -0.498929
+-0.504441 0.703946 0.0652631 -0.495722
+-0.549401 0.669447 0.0975451 -0.490393
+-0.592008 0.632081 0.12941 -0.482963
+-0.632081 0.592008 0.16072 -0.473465
+-0.669447 0.549401 0.191342 -0.46194
+-0.703946 0.504441 0.221144 -0.448436
+-0.735431 0.45732 0.25 -0.433013
+-0.763766 0.408242 0.277785 -0.415735
+-0.788831 0.357415 0.304381 -0.396677
+-0.810518 0.305057 0.329673 -0.37592
+-0.828735 0.251394 0.353553 -0.353553
+-0.843402 0.196654 0.37592 -0.329673
+-0.854458 0.141072 0.396677 -0.304381
+-0.861855 0.0848853 0.415735 -0.277785
+-0.865562 0.0283356 0.433013 -0.25
+0.88928 0.029112 -0.427181 0.160779
+0.885472 0.0872114 -0.436782 0.132496
+0.877872 0.144937 -0.444512 0.103646
+0.866513 0.202043 -0.450339 0.0743513
+0.851444 0.258283 -0.454238 0.0447385
+0.832728 0.313417 -0.456191 0.0149342
+0.810447 0.367209 -0.456191 -0.0149342
+0.784695 0.419428 -0.454238 -0.0447385
+0.755583 0.469852 -0.450339 -0.0743513
+0.723236 0.518263 -0.444512 -0.103646
+0.687791 0.564456 -0.436782 -0.132496
+0.649401 0.608231 -0.427181 -0.160779
+0.608231 0.649401 -0.415751 -0.188374
+0.564456 0.687791 -0.40254 -0.215162
+0.518263 0.723236 -0.387606 -0.241029
+0.469852 0.755583 -0.371012 -0.265864
+0.419428 0.784695 -0.352829 -0.28956
+0.367209 0.810447 -0.333136 -0.312016
+0.313417 0.832728 -0.312016 -0.333136
+0.258283 0.851444 -0.28956 -0.352829
+0.202043 0.866513 -0.265863 -0.371012
+0.144937 0.877872 -0.241029 -0.387606
+0.0872113 0.885472 -0.215162 -0.40254
+0.0291119 0.88928 -0.188374 -0.415751
+-0.0291121 0.88928 -0.160779 -0.427181
+-0.0872115 0.885472 -0.132496 -0.436782
+-0.144937 0.877872 -0.103646 -0.444512
+-0.202043 0.866513 -0.0743512 -0.450339
+-0.258283 0.851444 -0.0447384 -0.454238
+-0.313417 0.832728 -0.0149341 -0.456191
+-0.367209 0.810447 0.0149342 -0.456191
+-0.419428 0.784695 0.0447384 -0.454238
+-0.469852 0.755583 0.0743513 -0.450339
+-0.518263 0.723236 0.103646 -0.444512
+-0.564456 0.687791 0.132496 -0.436782
+-0.608231 0.649401 0.160779 -0.427181
+-0.649401 0.608231 0.188374 -0.415751
+-0.687791 0.564456 0.215162 -0.40254
+-0.723236 0.518263 0.241029 -0.387606
+-0.755583 0.469852 0.265863 -0.371012
+-0.784695 0.419428 0.28956 -0.352829
+-0.810447 0.367209 0.312016 -0.333136
+-0.832728 0.313417 0.333136 -0.312016
+-0.851444 0.258283 0.352829 -0.28956
+-0.866513 0.202043 0.371012 -0.265863
+-0.877872 0.144937 0.387606 -0.241029
+-0.885472 0.0872113 0.40254 -0.215162
+-0.88928 0.0291121 0.415751 -0.188374
+0.88928 0.029112 -0.450339 0.0743513
+0.885472 0.0872114 -0.454238 0.0447385
+0.877872 0.144937 -0.456191 0.0149342
+0.866513 0.202043 -0.456191 -0.0149342
+0.851444 0.258283 -0.454238 -0.0447385
+0.832728 0.313417 -0.450339 -0.0743513
+0.810447 0.367209 -0.444512 -0.103646
+0.784695 0.419428 -0.436782 -0.132496
+0.755583 0.469852 -0.427181 -0.160779
+0.723236 0.518263 -0.415751 -0.188374
+0.687791 0.564456 -0.40254 -0.215162
+0.649401 0.608231 -0.387606 -0.241029
+0.608231 0.649401 -0.371012 -0.265863
+0.564456 0.687791 -0.352829 -0.28956
+0.518263 0.723236 -0.333136 -0.312016
+0.469852 0.755583 -0.312016 -0.333136
+0.419428 0.784695 -0.28956 -0.352829
+0.367209 0.810447 -0.265863 -0.371012
+0.313417 0.832728 -0.241029 -0.387606
+0.258283 0.851444 -0.215162 -0.40254
+0.202043 0.866513 -0.188374 -0.415751
+0.144937 0.877872 -0.160779 -0.427181
+0.0872113 0.885472 -0.132496 -0.436782
+0.0291119 0.88928 -0.103646 -0.444512
+-0.0291121 0.88928 -0.0743512 -0.450339
+-0.0872115 0.885472 -0.0447385 -0.454238
+-0.144937 0.877872 -0.0149341 -0.456191
+-0.202043 0.866513 0.0149342 -0.456191
+-0.258283 0.851444 0.0447386 -0.454238
+-0.313417 0.832728 0.0743513 -0.450339
+-0.367209 0.810447 0.103646 -0.444512
+-0.419428 0.784695 0.132496 -0.436782
+-0.469852 0.755583 0.160779 -0.427181
+-0.518263 0.723236 0.188374 -0.415751
+-0.564456 0.687791 0.215162 -0.40254
+-0.608231 0.649401 0.241029 -0.387606
+-0.649401 0.608231 0.265863 -0.371012
+-0.687791 0.564456 0.28956 -0.352829
+-0.723236 0.518263 0.312016 -0.333136
+-0.755583 0.469852 0.333136 -0.312016
+-0.784695 0.419428 0.352829 -0.28956
+-0.810447 0.367209 0.371012 -0.265863
+-0.832728 0.313417 0.387606 -0.241029
+-0.851444 0.258283 0.40254 -0.215162
+-0.866513 0.202043 0.415751 -0.188374
+-0.877872 0.144937 0.427181 -0.160779
+-0.885472 0.0872113 0.436782 -0.132496
+-0.88928 0.0291121 0.444512 -0.103646
+0.912382 0.0298683 -0.407374 0.0267007
+0.908475 0.089477 -0.408248 -1.11532e-09
+0.900678 0.148703 -0.407374 -0.0267007
+0.889024 0.207291 -0.404756 -0.0532871
+0.873563 0.264992 -0.400404 -0.0796453
+0.854361 0.321559 -0.394338 -0.105662
+0.831501 0.376748 -0.386583 -0.131227
+0.80508 0.430324 -0.377172 -0.15623
+0.775212 0.482058 -0.366147 -0.180564
+0.742024 0.531727 -0.353553 -0.204124
+0.705659 0.579119 -0.339446 -0.226811
+0.666272 0.624032 -0.323885 -0.248526
+0.624032 0.666272 -0.306937 -0.269177
+0.579119 0.705659 -0.288675 -0.288675
+0.531727 0.742024 -0.269177 -0.306937
+0.482058 0.775212 -0.248526 -0.323885
+0.430324 0.80508 -0.226811 -0.339446
+0.376748 0.831501 -0.204124 -0.353553
+0.321559 0.854361 -0.180564 -0.366147
+0.264992 0.873563 -0.15623 -0.377172
+0.207291 0.889024 -0.131227 -0.386583
+0.148702 0.900678 -0.105662 -0.394338
+0.0894769 0.908475 -0.0796452 -0.400404
+0.0298682 0.912382 -0.0532871 -0.404756
+-0.0298684 0.912382 -0.0267007 -0.407374
+-0.0894771 0.908475 3.41689e-08 -0.408248
+-0.148703 0.900678 0.0267007 -0.407374
+-0.207291 0.889024 0.0532871 -0.404756
+-0.264993 0.873563 0.0796454 -0.400404
+-0.321559 0.854361 0.105662 -0.394338
+-0.376748 0.831501 0.131227 -0.386583
+-0.430324 0.80508 0.15623 -0.377172
+-0.482058 0.775212 0.180564 -0.366147
+-0.531727 0.742024 0.204124 -0.353553
+-0.579119 0.705659 0.226811 -0.339446
+-0.624032 0.666272 0.248526 -0.323885
+-0.666272 0.624032 0.269177 -0.306937
+-0.705659 0.579119 0.288675 -0.288675
+-0.742024 0.531727 0.306937 -0.269177
+-0.775212 0.482058 0.323885 -0.248526
+-0.80508 0.430324 0.339446 -0.226811
+-0.831501 0.376748 0.353553 -0.204124
+-0.854361 0.321559 0.366147 -0.180564
+-0.873563 0.264992 0.377172 -0.15623
+-0.889024 0.207291 0.386583 -0.131227
+-0.900678 0.148703 0.394338 -0.105662
+-0.908475 0.089477 0.400404 -0.0796453
+-0.912382 0.0298684 0.404756 -0.0532871
+0.912382 0.0298683 -0.394338 0.105662
+0.908475 0.089477 -0.400404 0.0796453
+0.900678 0.148703 -0.404756 0.0532871
+0.889024 0.207291 -0.407374 0.0267007
+0.873563 0.264992 -0.408248 -3.0427e-10
+0.854361 0.321559 -0.407374 -0.0267007
+0.831501 0.376748 -0.404756 -0.0532871
+0.80508 0.430324 -0.400404 -0.0796453
+0.775212 0.482058 -0.394338 -0.105662
+0.742024 0.531727 -0.386583 -0.131227
+0.705659 0.579119 -0.377172 -0.15623
+0.666272 0.624032 -0.366147 -0.180564
+0.624032 0.666272 -0.353553 -0.204124
+0.579119 0.705659 -0.339446 -0.226811
+0.531727 0.742024 -0.323885 -0.248526
+0.482058 0.775212 -0.306937 -0.269177
+0.430324 0.80508 -0.288675 -0.288675
+0.376748 0.831501 -0.269177 -0.306937
+0.321559 0.854361 -0.248526 -0.323885
+0.264992 0.873563 -0.226811 -0.339446
+0.207291 0.889024 -0.204124 -0.353553
+0.148702 0.900678 -0.180564 -0.366147
+0.0894769 0.908475 -0.15623 -0.377172
+0.0298682 0.912382 -0.131227 -0.386583
+-0.0298684 0.912382 -0.105662 -0.394338
+-0.0894771 0.908475 -0.0796453 -0.400404
+-0.148703 0.900678 -0.0532871 -0.404756
+-0.207291 0.889024 -0.0267007 -0.407374
+-0.264993 0.873563 6.68164e-08 -0.408248
+-0.321559 0.854361 0.0267008 -0.407374
+-0.376748 0.831501 0.0532872 -0.404756
+-0.430324 0.80508 0.0796452 -0.400404
+-0.482058 0.775212 0.105662 -0.394338
+-0.531727 0.742024 0.131227 -0.386583
+-0.579119 0.705659 0.15623 -0.377172
+-0.624032 0.666272 0.180564 -0.366147
+-0.666272 0.624032 0.204124 -0.353553
+-0.705659 0.579119 0.226811 -0.339446
+-0.742024 0.531727 0.248526 -0.323885
+-0.775212 0.482058 0.269177 -0.306937
+-0.80508 0.430324 0.288675 -0.288675
+-0.831501 0.376748 0.306937 -0.269177
+-0.854361 0.321559 0.323885 -0.248526
+-0.873563 0.264992 0.339446 -0.226811
+-0.889024 0.207291 0.353553 -0.204124
+-0.900678 0.148703 0.366147 -0.180564
+-0.908475 0.089477 0.377172 -0.15623
+-0.912382 0.0298684 0.386583 -0.131227
+0.933521 0.0305603 -0.35609 0.0283599
+0.929524 0.0915501 -0.357182 0.0050098
+0.921546 0.152148 -0.356745 -0.0183618
+0.909622 0.212094 -0.35478 -0.0416547
+0.893803 0.271132 -0.351296 -0.0647692
+0.874156 0.329009 -0.346308 -0.0876064
+0.850766 0.385477 -0.339837 -0.110069
+0.823733 0.440295 -0.33191 -0.132059
+0.793173 0.493227 -0.322563 -0.153484
+0.759216 0.544047 -0.311834 -0.174252
+0.722008 0.592537 -0.299769 -0.194274
+0.681709 0.63849 -0.286421 -0.213464
+0.63849 0.681709 -0.271847 -0.23174
+0.592537 0.722008 -0.256108 -0.249023
+0.544047 0.759216 -0.239273 -0.265241
+0.493227 0.793173 -0.221413 -0.280322
+0.440295 0.823733 -0.202605 -0.294203
+0.385477 0.850766 -0.18293 -0.306824
+0.329009 0.874156 -0.162471 -0.318131
+0.271132 0.893803 -0.141316 -0.328076
+0.212094 0.909622 -0.119556 -0.336616
+0.152148 0.921546 -0.0972846 -0.343715
+0.09155 0.929524 -0.0745963 -0.349342
+0.0305602 0.933521 -0.0515885 -0.353472
+-0.0305604 0.933521 -0.0283599 -0.35609
+-0.0915502 0.929524 -0.00500977 -0.357182
+-0.152148 0.921546 0.0183618 -0.356745
+-0.212094 0.909622 0.0416547 -0.35478
+-0.271132 0.893803 0.0647693 -0.351296
+-0.329009 0.874156 0.0876065 -0.346308
+-0.385477 0.850766 0.110069 -0.339837
+-0.440295 0.823733 0.132059 -0.33191
+-0.493227 0.793173 0.153484 -0.322563
+-0.544047 0.759216 0.174252 -0.311834
+-0.592537 0.722008 0.194274 -0.299769
+-0.63849 0.681709 0.213464 -0.286421
+-0.681709 0.63849 0.23174 -0.271847
+-0.722008 0.592537 0.249023 -0.256108
+-0.759216 0.544047 0.265241 -0.239273
+-0.793173 0.493227 0.280322 -0.221413
+-0.823733 0.440295 0.294203 -0.202605
+-0.850766 0.385477 0.306824 -0.18293
+-0.874156 0.329009 0.318131 -0.162471
+-0.893803 0.271132 0.328076 -0.141316
+-0.909622 0.212094 0.336616 -0.119556
+-0.921546 0.152148 0.343715 -0.0972846
+-0.929524 0.0915501 0.349342 -0.0745963
+-0.933521 0.0305604 0.353472 -0.0515886
+0.88928 0.029112 -0.387606 0.241029
+0.885472 0.0872114 -0.40254 0.215162
+0.877872 0.144937 -0.415751 0.188374
+0.866513 0.202043 -0.427181 0.160779
+0.851444 0.258283 -0.436782 0.132496
+0.832728 0.313417 -0.444512 0.103646
+0.810447 0.367209 -0.450339 0.0743513
+0.784695 0.419428 -0.454238 0.0447385
+0.755583 0.469852 -0.456191 0.0149341
+0.723236 0.518263 -0.456191 -0.0149341
+0.687791 0.564456 -0.454238 -0.0447385
+0.649401 0.608231 -0.450339 -0.0743513
+0.608231 0.649401 -0.444512 -0.103646
+0.564456 0.687791 -0.436782 -0.132496
+0.518263 0.723236 -0.427181 -0.160779
+0.469852 0.755583 -0.415751 -0.188374
+0.419428 0.784695 -0.40254 -0.215162
+0.367209 0.810447 -0.387606 -0.241029
+0.313417 0.832728 -0.371012 -0.265863
+0.258283 0.851444 -0.352829 -0.28956
+0.202043 0.866513 -0.333136 -0.312016
+0.144937 0.877872 -0.312016 -0.333136
+0.0872113 0.885472 -0.28956 -0.352829
+0.0291119 0.88928 -0.265863 -0.371012
+-0.0291121 0.88928 -0.241029 -0.387606
+-0.0872115 0.885472 -0.215162 -0.40254
+-0.144937 0.877872 -0.188374 -0.415751
+-0.202043 0.866513 -0.160779 -0.427181
+-0.258283 0.851444 -0.132496 -0.436782
+-0.313417 0.832728 -0.103646 -0.444512
+-0.367209 0.810447 -0.0743512 -0.450339
+-0.419428 0.784695 -0.0447386 -0.454238
+-0.469852 0.755583 -0.0149342 -0.456191
+-0.518263 0.723236 0.0149342 -0.456191
+-0.564456 0.687791 0.0447385 -0.454238
+-0.608231 0.649401 0.0743513 -0.450339
+-0.649401 0.608231 0.103646 -0.444512
+-0.687791 0.564456 0.132496 -0.436782
+-0.723236 0.518263 0.160779 -0.427181
+-0.755583 0.469852 0.188374 -0.415751
+-0.784695 0.419428 0.215162 -0.40254
+-0.810447 0.367209 0.241029 -0.387606
+-0.832728 0.313417 0.265864 -0.371012
+-0.851444 0.258283 0.28956 -0.352829
+-0.866513 0.202043 0.312016 -0.333136
+-0.877872 0.144937 0.333136 -0.312016
+-0.885472 0.0872113 0.352829 -0.28956
+-0.88928 0.0291121 0.371012 -0.265864
+0.912382 0.0298683 -0.366147 0.180564
+0.908475 0.089477 -0.377172 0.15623
+0.900678 0.148703 -0.386583 0.131227
+0.889024 0.207291 -0.394338 0.105662
+0.873563 0.264992 -0.400404 0.0796453
+0.854361 0.321559 -0.404756 0.0532871
+0.831501 0.376748 -0.407374 0.0267007
+0.80508 0.430324 -0.408248 -8.61828e-09
+0.775212 0.482058 -0.407374 -0.0267007
+0.742024 0.531727 -0.404756 -0.0532871
+0.705659 0.579119 -0.400404 -0.0796453
+0.666272 0.624032 -0.394338 -0.105662
+0.624032 0.666272 -0.386583 -0.131227
+0.579119 0.705659 -0.377172 -0.15623
+0.531727 0.742024 -0.366147 -0.180564
+0.482058 0.775212 -0.353553 -0.204124
+0.430324 0.80508 -0.339446 -0.226811
+0.376748 0.831501 -0.323885 -0.248526
+0.321559 0.854361 -0.306937 -0.269177
+0.264992 0.873563 -0.288675 -0.288675
+0.207291 0.889024 -0.269177 -0.306937
+0.148702 0.900678 -0.248526 -0.323885
+0.0894769 0.908475 -0.226811 -0.339446
+0.0298682 0.912382 -0.204124 -0.353553
+-0.0298684 0.912382 -0.180564 -0.366147
+-0.0894771 0.908475 -0.15623 -0.377172
+-0.148703 0.900678 -0.131227 -0.386583
+-0.207291 0.889024 -0.105662 -0.394338
+-0.264993 0.873563 -0.0796452 -0.400404
+-0.321559 0.854361 -0.053287 -0.404756
+-0.376748 0.831501 -0.0267007 -0.407374
+-0.430324 0.80508 -4.65371e-08 -0.408248
+-0.482058 0.775212 0.0267007 -0.407374
+-0.531727 0.742024 0.0532871 -0.404756
+-0.579119 0.705659 0.0796453 -0.400404
+-0.624032 0.666272 0.105662 -0.394338
+-0.666272 0.624032 0.131227 -0.386583
+-0.705659 0.579119 0.15623 -0.377172
+-0.742024 0.531727 0.180564 -0.366147
+-0.775212 0.482058 0.204124 -0.353553
+-0.80508 0.430324 0.226811 -0.339446
+-0.831501 0.376748 0.248526 -0.323885
+-0.854361 0.321559 0.269177 -0.306937
+-0.873563 0.264992 0.288675 -0.288675
+-0.889024 0.207291 0.306937 -0.269177
+-0.900678 0.148703 0.323885 -0.248526
+-0.908475 0.089477 0.339446 -0.226811
+-0.912382 0.0298684 0.353553 -0.204124
+0.912382 0.0298683 -0.323885 0.248526
+0.908475 0.089477 -0.339446 0.226811
+0.900678 0.148703 -0.353553 0.204124
+0.889024 0.207291 -0.366147 0.180564
+0.873563 0.264992 -0.377172 0.15623
+0.854361 0.321559 -0.386583 0.131227
+0.831501 0.376748 -0.394338 0.105662
+0.80508 0.430324 -0.400404 0.0796453
+0.775212 0.482058 -0.404756 0.0532871
+0.742024 0.531727 -0.407374 0.0267007
+0.705659 0.579119 -0.408248 7.4012e-09
+0.666272 0.624032 -0.407374 -0.0267007
+0.624032 0.666272 -0.404756 -0.0532871
+0.579119 0.705659 -0.400404 -0.0796453
+0.531727 0.742024 -0.394338 -0.105662
+0.482058 0.775212 -0.386583 -0.131227
+0.430324 0.80508 -0.377172 -0.15623
+0.376748 0.831501 -0.366147 -0.180564
+0.321559 0.854361 -0.353553 -0.204124
+0.264992 0.873563 -0.339446 -0.226811
+0.207291 0.889024 -0.323885 -0.248526
+0.148702 0.900678 -0.306937 -0.269177
+0.0894769 0.908475 -0.288675 -0.288675
+0.0298682 0.912382 -0.269177 -0.306937
+-0.0298684 0.912382 -0.248526 -0.323885
+-0.0894771 0.908475 -0.226811 -0.339446
+-0.148703 0.900678 -0.204124 -0.353553
+-0.207291 0.889024 -0.180564 -0.366147
+-0.264993 0.873563 -0.15623 -0.377172
+-0.321559 0.854361 -0.131227 -0.386583
+-0.376748 0.831501 -0.105662 -0.394338
+-0.430324 0.80508 -0.0796453 -0.400404
+-0.482058 0.775212 -0.0532871 -0.404756
+-0.531727 0.742024 -0.0267007 -0.407374
+-0.579119 0.705659 -1.38896e-08 -0.408248
+-0.624032 0.666272 0.0267007 -0.407374
+-0.666272 0.624032 0.0532871 -0.404756
+-0.705659 0.579119 0.0796453 -0.400404
+-0.742024 0.531727 0.105662 -0.394338
+-0.775212 0.482058 0.131227 -0.386583
+-0.80508 0.430324 0.15623 -0.377172
+-0.831501 0.376748 0.180564 -0.366147
+-0.854361 0.321559 0.204124 -0.353553
+-0.873563 0.264992 0.226811 -0.339446
+-0.889024 0.207291 0.248526 -0.323885
+-0.900678 0.148703 0.269177 -0.306937
+-0.908475 0.089477 0.288675 -0.288675
+-0.912382 0.0298684 0.306937 -0.269177
+0.933521 0.0305603 -0.308521 0.180053
+0.929524 0.0915501 -0.319636 0.159489
+0.921546 0.152148 -0.329383 0.138242
+0.909622 0.212094 -0.337719 0.116404
+0.893803 0.271132 -0.344609 0.0940667
+0.874156 0.329009 -0.350024 0.0713268
+0.850766 0.385477 -0.353939 0.0482814
+0.823733 0.440295 -0.356339 0.0250293
+0.793173 0.493227 -0.357213 0.00166998
+0.759216 0.544047 -0.356558 -0.0216965
+0.722008 0.592537 -0.354375 -0.04497
+0.681709 0.63849 -0.350675 -0.068051
+0.63849 0.681709 -0.345474 -0.0908405
+0.592537 0.722008 -0.338793 -0.113241
+0.544047 0.759216 -0.330661 -0.135157
+0.493227 0.793173 -0.321114 -0.156494
+0.440295 0.823733 -0.310191 -0.17716
+0.385477 0.850766 -0.29794 -0.197069
+0.329009 0.874156 -0.284413 -0.216133
+0.271132 0.893803 -0.269668 -0.234272
+0.212094 0.909622 -0.253769 -0.251407
+0.152148 0.921546 -0.236783 -0.267466
+0.09155 0.929524 -0.218783 -0.28238
+0.0305602 0.933521 -0.199846 -0.296084
+-0.0305604 0.933521 -0.180053 -0.308521
+-0.0915502 0.929524 -0.159489 -0.319636
+-0.152148 0.921546 -0.138242 -0.329383
+-0.212094 0.909622 -0.116404 -0.337719
+-0.271132 0.893803 -0.0940666 -0.344609
+-0.329009 0.874156 -0.0713267 -0.350024
+-0.385477 0.850766 -0.0482813 -0.353939
+-0.440295 0.823733 -0.0250293 -0.356339
+-0.493227 0.793173 -0.00166998 -0.357213
+-0.544047 0.759216 0.0216965 -0.356558
+-0.592537 0.722008 0.04497 -0.354375
+-0.63849 0.681709 0.068051 -0.350675
+-0.681709 0.63849 0.0908405 -0.345474
+-0.722008 0.592537 0.113241 -0.338793
+-0.759216 0.544047 0.135157 -0.330661
+-0.793173 0.493227 0.156494 -0.321114
+-0.823733 0.440295 0.17716 -0.310191
+-0.850766 0.385477 0.197069 -0.29794
+-0.874156 0.329009 0.216133 -0.284413
+-0.893803 0.271132 0.234272 -0.269668
+-0.909622 0.212094 0.251407 -0.253769
+-0.921546 0.152148 0.267466 -0.236783
+-0.929524 0.0915501 0.28238 -0.218783
+-0.933521 0.0305604 0.296084 -0.199846
+0.933521 0.0305603 -0.340851 0.106886
+0.929524 0.0915501 -0.347112 0.0843647
+0.921546 0.152148 -0.351887 0.0614818
+0.909622 0.212094 -0.355154 0.0383357
+0.893803 0.271132 -0.356901 0.0150254
+0.874156 0.329009 -0.35712 -0.00834917
+0.850766 0.385477 -0.355809 -0.031688
+0.823733 0.440295 -0.352975 -0.0548912
+0.793173 0.493227 -0.348629 -0.0778593
+0.759216 0.544047 -0.34279 -0.100494
+0.722008 0.592537 -0.335484 -0.122698
+0.681709 0.63849 -0.32674 -0.144377
+0.63849 0.681709 -0.316598 -0.165438
+0.592537 0.722008 -0.3051 -0.18579
+0.544047 0.759216 -0.292296 -0.205347
+0.493227 0.793173 -0.278239 -0.224025
+0.440295 0.823733 -0.262992 -0.241743
+0.385477 0.850766 -0.246618 -0.258426
+0.329009 0.874156 -0.229188 -0.274002
+0.271132 0.893803 -0.210777 -0.288405
+0.212094 0.909622 -0.191463 -0.301573
+0.152148 0.921546 -0.171329 -0.313449
+0.09155 0.929524 -0.150462 -0.323984
+0.0305602 0.933521 -0.12895 -0.333131
+-0.0305604 0.933521 -0.106886 -0.340851
+-0.0915502 0.929524 -0.0843647 -0.347112
+-0.152148 0.921546 -0.0614818 -0.351887
+-0.212094 0.909622 -0.0383357 -0.355154
+-0.271132 0.893803 -0.0150254 -0.356901
+-0.329009 0.874156 0.00834923 -0.35712
+-0.385477 0.850766 0.0316881 -0.355809
+-0.440295 0.823733 0.0548912 -0.352975
+-0.493227 0.793173 0.0778593 -0.348629
+-0.544047 0.759216 0.100494 -0.34279
+-0.592537 0.722008 0.122698 -0.335484
+-0.63849 0.681709 0.144377 -0.32674
+-0.681709 0.63849 0.165438 -0.316598
+-0.722008 0.592537 0.18579 -0.3051
+-0.759216 0.544047 0.205347 -0.292296
+-0.793173 0.493227 0.224025 -0.278239
+-0.823733 0.440295 0.241743 -0.262992
+-0.850766 0.385477 0.258425 -0.246618
+-0.874156 0.329009 0.274002 -0.229188
+-0.893803 0.271132 0.288405 -0.210777
+-0.909622 0.212094 0.301573 -0.191463
+-0.921546 0.152148 0.313449 -0.171329
+-0.929524 0.0915501 0.323984 -0.150462
+-0.933521 0.0305604 0.333131 -0.12895
+0.951462 0.0311476 -0.304712 0.0300115
+0.947388 0.0933095 -0.306022 0.0100181
+0.939256 0.155072 -0.306022 -0.0100181
+0.927103 0.21617 -0.304712 -0.0300115
+0.91098 0.276343 -0.302097 -0.0498763
+0.890956 0.335332 -0.298188 -0.0695276
+0.867117 0.392885 -0.293002 -0.0888812
+0.839564 0.448756 -0.286561 -0.107854
+0.808416 0.502706 -0.278894 -0.126365
+0.773807 0.554502 -0.270032 -0.144335
+0.735884 0.603924 -0.260014 -0.161687
+0.69481 0.650761 -0.248882 -0.178347
+0.65076 0.69481 -0.236685 -0.194242
+0.603924 0.735884 -0.223474 -0.209307
+0.554502 0.773807 -0.209307 -0.223474
+0.502706 0.808416 -0.194242 -0.236685
+0.448756 0.839564 -0.178347 -0.248882
+0.392885 0.867117 -0.161687 -0.260014
+0.335332 0.890956 -0.144335 -0.270032
+0.276343 0.91098 -0.126365 -0.278894
+0.21617 0.927103 -0.107854 -0.286561
+0.155072 0.939256 -0.0888811 -0.293002
+0.0933094 0.947388 -0.0695276 -0.298188
+0.0311475 0.951462 -0.0498763 -0.302097
+-0.0311477 0.951462 -0.0300115 -0.304712
+-0.0933096 0.947388 -0.0100181 -0.306022
+-0.155072 0.939256 0.0100182 -0.306022
+-0.21617 0.927103 0.0300115 -0.304712
+-0.276343 0.91098 0.0498764 -0.302097
+-0.335332 0.890956 0.0695277 -0.298188
+-0.392886 0.867116 0.0888812 -0.293002
+-0.448756 0.839564 0.107854 -0.286562
+-0.502706 0.808416 0.126365 -0.278894
+-0.554502 0.773807 0.144335 -0.270032
+-0.603924 0.735884 0.161687 -0.260014
+-0.650761 0.69481 0.178347 -0.248882
+-0.69481 0.650761 0.194242 -0.236685
+-0.735884 0.603924 0.209307 -0.223474
+-0.773807 0.554502 0.223474 -0.209307
+-0.808416 0.502706 0.236685 -0.194242
+-0.839564 0.448756 0.248882 -0.178347
+-0.867117 0.392885 0.260014 -0.161687
+-0.890956 0.335332 0.270032 -0.144335
+-0.91098 0.276343 0.278894 -0.126365
+-0.927103 0.21617 0.286562 -0.107854
+-0.939256 0.155072 0.293002 -0.0888812
+-0.947388 0.0933095 0.298188 -0.0695276
+-0.951462 0.0311477 0.302097 -0.0498764
+0.951462 0.0311476 -0.286561 0.107854
+0.947388 0.0933095 -0.293002 0.0888812
+0.939256 0.155072 -0.298188 0.0695276
+0.927103 0.21617 -0.302097 0.0498763
+0.91098 0.276343 -0.304712 0.0300115
+0.890956 0.335332 -0.306022 0.0100181
+0.867117 0.392885 -0.306022 -0.0100181
+0.839564 0.448756 -0.304712 -0.0300115
+0.808416 0.502706 -0.302097 -0.0498764
+0.773807 0.554502 -0.298188 -0.0695276
+0.735884 0.603924 -0.293002 -0.0888812
+0.69481 0.650761 -0.286561 -0.107854
+0.65076 0.69481 -0.278894 -0.126365
+0.603924 0.735884 -0.270032 -0.144335
+0.554502 0.773807 -0.260014 -0.161687
+0.502706 0.808416 -0.248882 -0.178347
+0.448756 0.839564 -0.236685 -0.194242
+0.392885 0.867117 -0.223474 -0.209307
+0.335332 0.890956 -0.209307 -0.223474
+0.276343 0.91098 -0.194242 -0.236685
+0.21617 0.927103 -0.178347 -0.248882
+0.155072 0.939256 -0.161687 -0.260014
+0.0933094 0.947388 -0.144335 -0.270032
+0.0311475 0.951462 -0.126365 -0.278894
+-0.0311477 0.951462 -0.107854 -0.286562
+-0.0933096 0.947388 -0.0888811 -0.293002
+-0.155072 0.939256 -0.0695276 -0.298188
+-0.21617 0.927103 -0.0498763 -0.302097
+-0.276343 0.91098 -0.0300114 -0.304712
+-0.335332 0.890956 -0.0100181 -0.306022
+-0.392886 0.867116 0.0100182 -0.306022
+-0.448756 0.839564 0.0300115 -0.304712
+-0.502706 0.808416 0.0498763 -0.302097
+-0.554502 0.773807 0.0695277 -0.298188
+-0.603924 0.735884 0.0888812 -0.293002
+-0.650761 0.69481 0.107854 -0.286561
+-0.69481 0.650761 0.126365 -0.278894
+-0.735884 0.603924 0.144335 -0.270032
+-0.773807 0.554502 0.161687 -0.260014
+-0.808416 0.502706 0.178347 -0.248882
+-0.839564 0.448756 0.194242 -0.236685
+-0.867117 0.392885 0.209307 -0.223474
+-0.890956 0.335332 0.223474 -0.209307
+-0.91098 0.276343 0.236685 -0.194242
+-0.927103 0.21617 0.248882 -0.178347
+-0.939256 0.155072 0.260014 -0.161687
+-0.947388 0.0933095 0.270032 -0.144335
+-0.951462 0.0311477 0.278894 -0.126365
+0.966382 0.0316361 -0.253185 0.031648
+0.962244 0.0947728 -0.254713 0.0150212
+0.953986 0.157504 -0.25515 -0.00166997
+0.941642 0.21956 -0.254494 -0.018354
+0.925266 0.280676 -0.252749 -0.0349594
+0.904928 0.340591 -0.249921 -0.0514151
+0.880714 0.399046 -0.246023 -0.0676507
+0.85273 0.455794 -0.241072 -0.0835965
+0.821094 0.510589 -0.235089 -0.0991844
+0.785942 0.563198 -0.228098 -0.114348
+0.747424 0.613395 -0.220131 -0.129021
+0.705706 0.660965 -0.211221 -0.143142
+0.660965 0.705706 -0.201407 -0.15665
+0.613395 0.747424 -0.190731 -0.169487
+0.563198 0.785942 -0.179237 -0.181599
+0.510589 0.821094 -0.166976 -0.192933
+0.455793 0.85273 -0.154 -0.203441
+0.399046 0.880714 -0.140365 -0.213077
+0.340591 0.904928 -0.126129 -0.221801
+0.280676 0.925266 -0.111352 -0.229575
+0.21956 0.941642 -0.0960987 -0.236367
+0.157504 0.953986 -0.0804338 -0.242146
+0.0947727 0.962244 -0.0644245 -0.246888
+0.031636 0.966382 -0.0481393 -0.250573
+-0.0316362 0.966382 -0.031648 -0.253185
+-0.0947729 0.962244 -0.0150212 -0.254713
+-0.157504 0.953986 0.00166999 -0.25515
+-0.21956 0.941642 0.018354 -0.254494
+-0.280676 0.925266 0.0349595 -0.252749
+-0.340591 0.904927 0.0514152 -0.249921
+-0.399047 0.880714 0.0676507 -0.246023
+-0.455793 0.85273 0.0835965 -0.241072
+-0.510589 0.821094 0.0991844 -0.235089
+-0.563198 0.785941 0.114348 -0.228098
+-0.613395 0.747424 0.129021 -0.220131
+-0.660966 0.705706 0.143142 -0.211221
+-0.705706 0.660966 0.15665 -0.201407
+-0.747424 0.613395 0.169487 -0.190731
+-0.785942 0.563198 0.181599 -0.179237
+-0.821094 0.510589 0.192933 -0.166976
+-0.85273 0.455794 0.20344 -0.154
+-0.880714 0.399046 0.213077 -0.140365
+-0.904928 0.340591 0.221801 -0.126129
+-0.925266 0.280676 0.229575 -0.111352
+-0.941642 0.21956 0.236367 -0.0960987
+-0.953986 0.157504 0.242146 -0.0804339
+-0.962244 0.0947727 0.246888 -0.0644245
+-0.966382 0.0316362 0.250573 -0.0481394
+0.841175 0.0275372 -0.222887 0.491923
+0.837573 0.0824937 -0.254583 0.476292
+0.830384 0.137097 -0.285189 0.458622
+0.81964 0.191113 -0.314574 0.438987
+0.805385 0.244311 -0.342612 0.417473
+0.787682 0.296463 -0.369182 0.394172
+0.766606 0.347345 -0.394172 0.369182
+0.742247 0.396739 -0.417473 0.342612
+0.71471 0.444435 -0.438987 0.314574
+0.684112 0.490228 -0.458622 0.285189
+0.650585 0.533921 -0.476292 0.254583
+0.614272 0.575329 -0.491923 0.222887
+0.575329 0.614272 -0.505447 0.190237
+0.533922 0.650585 -0.516807 0.156772
+0.490228 0.684112 -0.525954 0.122635
+0.444435 0.71471 -0.532848 0.0879736
+0.396739 0.742247 -0.537461 0.0529353
+0.347345 0.766606 -0.539773 0.0176703
+0.296463 0.787682 -0.539773 -0.0176704
+0.244311 0.805385 -0.537461 -0.0529353
+0.191113 0.81964 -0.532848 -0.0879736
+0.137097 0.830384 -0.525954 -0.122635
+0.0824936 0.837573 -0.516807 -0.156772
+0.0275371 0.841175 -0.505447 -0.190237
+-0.0275373 0.841175 -0.491923 -0.222887
+-0.0824938 0.837573 -0.476292 -0.254583
+-0.137097 0.830384 -0.458622 -0.285189
+-0.191113 0.81964 -0.438987 -0.314574
+-0.244311 0.805385 -0.417473 -0.342612
+-0.296463 0.787682 -0.394172 -0.369182
+-0.347345 0.766606 -0.369182 -0.394172
+-0.396739 0.742247 -0.342612 -0.417473
+-0.444435 0.71471 -0.314574 -0.438987
+-0.490228 0.684112 -0.285189 -0.458622
+-0.533921 0.650585 -0.254583 -0.476292
+-0.575329 0.614272 -0.222887 -0.491923
+-0.614272 0.575329 -0.190237 -0.505447
+-0.650585 0.533921 -0.156772 -0.516807
+-0.684112 0.490228 -0.122635 -0.525954
+-0.71471 0.444435 -0.0879736 -0.532848
+-0.742247 0.39674 -0.0529354 -0.537461
+-0.766606 0.347345 -0.0176703 -0.539773
+-0.787682 0.296463 0.0176704 -0.539773
+-0.805385 0.244311 0.0529353 -0.537461
+-0.81964 0.191113 0.0879736 -0.532848
+-0.830384 0.137097 0.122635 -0.525954
+-0.837573 0.0824937 0.156772 -0.516807
+-0.841175 0.0275373 0.190237 -0.505447
+0.865562 0.0283356 -0.25 0.433013
+0.861855 0.0848853 -0.277785 0.415735
+0.854458 0.141072 -0.304381 0.396677
+0.843402 0.196654 -0.329673 0.37592
+0.828735 0.251394 -0.353553 0.353553
+0.810518 0.305057 -0.37592 0.329673
+0.788831 0.357415 -0.396677 0.304381
+0.763766 0.408242 -0.415735 0.277785
+0.735431 0.45732 -0.433013 0.25
+0.703946 0.50444 -0.448436 0.221144
+0.669447 0.549401 -0.46194 0.191342
+0.632081 0.592008 -0.473465 0.16072
+0.592008 0.632081 -0.482963 0.12941
+0.549401 0.669447 -0.490393 0.0975452
+0.50444 0.703946 -0.495722 0.0652631
+0.45732 0.735431 -0.498929 0.0327015
+0.408241 0.763766 -0.5 -4.11028e-08
+0.357415 0.788831 -0.498929 -0.0327016
+0.305057 0.810518 -0.495722 -0.0652631
+0.251394 0.828735 -0.490393 -0.0975452
+0.196654 0.843402 -0.482963 -0.12941
+0.141072 0.854458 -0.473465 -0.16072
+0.0848852 0.861855 -0.46194 -0.191342
+0.0283355 0.865562 -0.448436 -0.221144
+-0.0283356 0.865562 -0.433013 -0.25
+-0.0848854 0.861855 -0.415735 -0.277785
+-0.141072 0.854458 -0.396677 -0.304381
+-0.196654 0.843402 -0.37592 -0.329673
+-0.251394 0.828735 -0.353553 -0.353553
+-0.305058 0.810518 -0.329673 -0.37592
+-0.357415 0.788831 -0.304381 -0.396677
+-0.408241 0.763766 -0.277785 -0.415735
+-0.45732 0.735431 -0.25 -0.433013
+-0.504441 0.703946 -0.221144 -0.448436
+-0.549401 0.669447 -0.191342 -0.46194
+-0.592008 0.632081 -0.16072 -0.473465
+-0.632081 0.592008 -0.12941 -0.482963
+-0.669447 0.549401 -0.0975451 -0.490393
+-0.703946 0.504441 -0.0652631 -0.495722
+-0.735431 0.45732 -0.0327016 -0.498929
+-0.763766 0.408242 -5.62508e-08 -0.5
+-0.788831 0.357415 0.0327016 -0.498929
+-0.810518 0.305057 0.0652631 -0.495722
+-0.828735 0.251394 0.0975451 -0.490393
+-0.843402 0.196654 0.12941 -0.482963
+-0.854458 0.141072 0.16072 -0.473465
+-0.861855 0.0848853 0.191342 -0.46194
+-0.865562 0.0283356 0.221144 -0.448436
+0.865562 0.0283356 -0.16072 0.473465
+0.861855 0.0848853 -0.191342 0.46194
+0.854458 0.141072 -0.221144 0.448436
+0.843402 0.196654 -0.25 0.433013
+0.828735 0.251394 -0.277785 0.415735
+0.810518 0.305057 -0.304381 0.396677
+0.788831 0.357415 -0.329673 0.37592
+0.763766 0.408242 -0.353553 0.353553
+0.735431 0.45732 -0.37592 0.329673
+0.703946 0.50444 -0.396677 0.304381
+0.669447 0.549401 -0.415735 0.277785
+0.632081 0.592008 -0.433013 0.25
+0.592008 0.632081 -0.448436 0.221144
+0.549401 0.669447 -0.46194 0.191342
+0.50444 0.703946 -0.473465 0.16072
+0.45732 0.735431 -0.482963 0.129409
+0.408241 0.763766 -0.490393 0.0975451
+0.357415 0.788831 -0.495722 0.0652631
+0.305057 0.810518 -0.498929 0.0327015
+0.251394 0.828735 -0.5 -2.1483e-08
+0.196654 0.843402 -0.498929 -0.0327016
+0.141072 0.854458 -0.495722 -0.0652632
+0.0848852 0.861855 -0.490393 -0.0975452
+0.0283355 0.865562 -0.482963 -0.12941
+-0.0283356 0.865562 -0.473465 -0.16072
+-0.0848854 0.861855 -0.46194 -0.191342
+-0.141072 0.854458 -0.448436 -0.221144
+-0.196654 0.843402 -0.433013 -0.25
+-0.251394 0.828735 -0.415735 -0.277785
+-0.305058 0.810518 -0.396677 -0.304381
+-0.357415 0.788831 -0.37592 -0.329673
+-0.408241 0.763766 -0.353553 -0.353553
+-0.45732 0.735431 -0.329673 -0.37592
+-0.504441 0.703946 -0.304381 -0.396677
+-0.549401 0.669447 -0.277785 -0.415735
+-0.592008 0.632081 -0.25 -0.433013
+-0.632081 0.592008 -0.221144 -0.448436
+-0.669447 0.549401 -0.191342 -0.46194
+-0.703946 0.504441 -0.16072 -0.473465
+-0.735431 0.45732 -0.12941 -0.482963
+-0.763766 0.408242 -0.0975452 -0.490393
+-0.788831 0.357415 -0.0652631 -0.495722
+-0.810518 0.305057 -0.0327015 -0.498929
+-0.828735 0.251394 -1.62659e-08 -0.5
+-0.843402 0.196654 0.0327016 -0.498929
+-0.854458 0.141072 0.0652631 -0.495722
+-0.861855 0.0848853 0.0975452 -0.490393
+-0.865562 0.0283356 0.129409 -0.482963
+0.88928 0.029112 -0.188374 0.415751
+0.885472 0.0872114 -0.215162 0.40254
+0.877872 0.144937 -0.241029 0.387606
+0.866513 0.202043 -0.265863 0.371012
+0.851444 0.258283 -0.28956 0.352829
+0.832728 0.313417 -0.312016 0.333136
+0.810447 0.367209 -0.333136 0.312016
+0.784695 0.419428 -0.352829 0.28956
+0.755583 0.469852 -0.371012 0.265863
+0.723236 0.518263 -0.387606 0.241029
+0.687791 0.564456 -0.40254 0.215162
+0.649401 0.608231 -0.415751 0.188374
+0.608231 0.649401 -0.427181 0.160779
+0.564456 0.687791 -0.436782 0.132496
+0.518263 0.723236 -0.444512 0.103646
+0.469852 0.755583 -0.450339 0.0743512
+0.419428 0.784695 -0.454238 0.0447385
+0.367209 0.810447 -0.456191 0.0149341
+0.313417 0.832728 -0.456191 -0.0149342
+0.258283 0.851444 -0.454238 -0.0447385
+0.202043 0.866513 -0.450339 -0.0743513
+0.144937 0.877872 -0.444512 -0.103646
+0.0872113 0.885472 -0.436781 -0.132496
+0.0291119 0.88928 -0.427181 -0.160779
+-0.0291121 0.88928 -0.415751 -0.188374
+-0.0872115 0.885472 -0.40254 -0.215162
+-0.144937 0.877872 -0.387606 -0.241029
+-0.202043 0.866513 -0.371012 -0.265863
+-0.258283 0.851444 -0.352829 -0.28956
+-0.313417 0.832728 -0.333136 -0.312016
+-0.367209 0.810447 -0.312016 -0.333136
+-0.419428 0.784695 -0.28956 -0.352829
+-0.469852 0.755583 -0.265863 -0.371012
+-0.518263 0.723236 -0.241029 -0.387606
+-0.564456 0.687791 -0.215162 -0.40254
+-0.608231 0.649401 -0.188374 -0.415751
+-0.649401 0.608231 -0.160779 -0.427181
+-0.687791 0.564456 -0.132496 -0.436782
+-0.723236 0.518263 -0.103646 -0.444512
+-0.755583 0.469852 -0.0743513 -0.450339
+-0.784695 0.419428 -0.0447386 -0.454238
+-0.810447 0.367209 -0.0149342 -0.456191
+-0.832728 0.313417 0.0149342 -0.456191
+-0.851444 0.258283 0.0447385 -0.454238
+-0.866513 0.202043 0.0743513 -0.450339
+-0.877872 0.144937 0.103646 -0.444512
+-0.885472 0.0872113 0.132496 -0.436782
+-0.88928 0.0291121 0.160779 -0.427181
+0.88928 0.029112 -0.265863 0.371012
+0.885472 0.0872114 -0.28956 0.352829
+0.877872 0.144937 -0.312016 0.333136
+0.866513 0.202043 -0.333136 0.312016
+0.851444 0.258283 -0.352829 0.28956
+0.832728 0.313417 -0.371012 0.265863
+0.810447 0.367209 -0.387606 0.241029
+0.784695 0.419428 -0.40254 0.215162
+0.755583 0.469852 -0.415751 0.188374
+0.723236 0.518263 -0.427181 0.160779
+0.687791 0.564456 -0.436782 0.132496
+0.649401 0.608231 -0.444512 0.103646
+0.608231 0.649401 -0.450339 0.0743513
+0.564456 0.687791 -0.454238 0.0447385
+0.518263 0.723236 -0.456191 0.0149341
+0.469852 0.755583 -0.456191 -0.0149342
+0.419428 0.784695 -0.454238 -0.0447385
+0.367209 0.810447 -0.450339 -0.0743513
+0.313417 0.832728 -0.444512 -0.103646
+0.258283 0.851444 -0.436782 -0.132496
+0.202043 0.866513 -0.427181 -0.160779
+0.144937 0.877872 -0.415751 -0.188374
+0.0872113 0.885472 -0.40254 -0.215162
+0.0291119 0.88928 -0.387606 -0.241029
+-0.0291121 0.88928 -0.371012 -0.265864
+-0.0872115 0.885472 -0.352829 -0.28956
+-0.144937 0.877872 -0.333136 -0.312016
+-0.202043 0.866513 -0.312016 -0.333136
+-0.258283 0.851444 -0.28956 -0.352829
+-0.313417 0.832728 -0.265863 -0.371012
+-0.367209 0.810447 -0.241029 -0.387606
+-0.419428 0.784695 -0.215162 -0.40254
+-0.469852 0.755583 -0.188374 -0.415751
+-0.518263 0.723236 -0.160779 -0.427181
+-0.564456 0.687791 -0.132496 -0.436782
+-0.608231 0.649401 -0.103646 -0.444512
+-0.649401 0.608231 -0.0743513 -0.450339
+-0.687791 0.564456 -0.0447385 -0.454238
+-0.723236 0.518263 -0.0149342 -0.456191
+-0.755583 0.469852 0.0149342 -0.456191
+-0.784695 0.419428 0.0447384 -0.454238
+-0.810447 0.367209 0.0743513 -0.450339
+-0.832728 0.313417 0.103646 -0.444512
+-0.851444 0.258283 0.132496 -0.436782
+-0.866513 0.202043 0.160779 -0.427181
+-0.877872 0.144937 0.188374 -0.415751
+-0.885472 0.0872113 0.215162 -0.40254
+-0.88928 0.0291121 0.241029 -0.387606
+0.912382 0.0298683 -0.269177 0.306937
+0.908475 0.089477 -0.288675 0.288675
+0.900678 0.148703 -0.306937 0.269177
+0.889024 0.207291 -0.323885 0.248526
+0.873563 0.264992 -0.339446 0.226811
+0.854361 0.321559 -0.353553 0.204124
+0.831501 0.376748 -0.366147 0.180564
+0.80508 0.430324 -0.377172 0.15623
+0.775212 0.482058 -0.386583 0.131227
+0.742024 0.531727 -0.394338 0.105662
+0.705659 0.579119 -0.400404 0.0796453
+0.666272 0.624032 -0.404756 0.0532871
+0.624032 0.666272 -0.407374 0.0267007
+0.579119 0.705659 -0.408248 -9.12808e-10
+0.531727 0.742024 -0.407374 -0.0267007
+0.482058 0.775212 -0.404756 -0.0532871
+0.430324 0.80508 -0.400404 -0.0796453
+0.376748 0.831501 -0.394338 -0.105662
+0.321559 0.854361 -0.386583 -0.131227
+0.264992 0.873563 -0.377172 -0.15623
+0.207291 0.889024 -0.366147 -0.180564
+0.148702 0.900678 -0.353553 -0.204124
+0.0894769 0.908475 -0.339446 -0.226811
+0.0298682 0.912382 -0.323885 -0.248526
+-0.0298684 0.912382 -0.306937 -0.269177
+-0.0894771 0.908475 -0.288675 -0.288675
+-0.148703 0.900678 -0.269177 -0.306937
+-0.207291 0.889024 -0.248526 -0.323885
+-0.264993 0.873563 -0.226811 -0.339446
+-0.321559 0.854361 -0.204124 -0.353553
+-0.376748 0.831501 -0.180564 -0.366147
+-0.430324 0.80508 -0.15623 -0.377172
+-0.482058 0.775212 -0.131227 -0.386583
+-0.531727 0.742024 -0.105662 -0.394338
+-0.579119 0.705659 -0.0796453 -0.400404
+-0.624032 0.666272 -0.0532871 -0.404756
+-0.666272 0.624032 -0.0267007 -0.407374
+-0.705659 0.579119 1.87579e-08 -0.408248
+-0.742024 0.531727 0.0267007 -0.407374
+-0.775212 0.482058 0.0532871 -0.404756
+-0.80508 0.430324 0.0796452 -0.400404
+-0.831501 0.376748 0.105662 -0.394338
+-0.854361 0.321559 0.131227 -0.386583
+-0.873563 0.264992 0.15623 -0.377172
+-0.889024 0.207291 0.180564 -0.366147
+-0.900678 0.148703 0.204124 -0.353553
+-0.908475 0.089477 0.226811 -0.339446
+-0.912382 0.0298684 0.248526 -0.323885
+0.912382 0.0298683 -0.204124 0.353553
+0.908475 0.089477 -0.226811 0.339446
+0.900678 0.148703 -0.248526 0.323885
+0.889024 0.207291 -0.269177 0.306937
+0.873563 0.264992 -0.288675 0.288675
+0.854361 0.321559 -0.306937 0.269177
+0.831501 0.376748 -0.323885 0.248526
+0.80508 0.430324 -0.339446 0.226811
+0.775212 0.482058 -0.353553 0.204124
+0.742024 0.531727 -0.366147 0.180564
+0.705659 0.579119 -0.377172 0.15623
+0.666272 0.624032 -0.386583 0.131227
+0.624032 0.666272 -0.394338 0.105662
+0.579119 0.705659 -0.400404 0.0796453
+0.531727 0.742024 -0.404756 0.0532871
+0.482058 0.775212 -0.407374 0.0267007
+0.430324 0.80508 -0.408248 -3.35603e-08
+0.376748 0.831501 -0.407374 -0.0267007
+0.321559 0.854361 -0.404756 -0.0532871
+0.264992 0.873563 -0.400404 -0.0796453
+0.207291 0.889024 -0.394338 -0.105662
+0.148702 0.900678 -0.386583 -0.131227
+0.0894769 0.908475 -0.377172 -0.15623
+0.0298682 0.912382 -0.366147 -0.180564
+-0.0298684 0.912382 -0.353553 -0.204124
+-0.0894771 0.908475 -0.339446 -0.226811
+-0.148703 0.900678 -0.323885 -0.248526
+-0.207291 0.889024 -0.306937 -0.269177
+-0.264993 0.873563 -0.288675 -0.288675
+-0.321559 0.854361 -0.269177 -0.306937
+-0.376748 0.831501 -0.248526 -0.323885
+-0.430324 0.80508 -0.226811 -0.339446
+-0.482058 0.775212 -0.204124 -0.353553
+-0.531727 0.742024 -0.180564 -0.366147
+-0.579119 0.705659 -0.15623 -0.377172
+-0.624032 0.666272 -0.131227 -0.386583
+-0.666272 0.624032 -0.105662 -0.394338
+-0.705659 0.579119 -0.0796453 -0.400404
+-0.742024 0.531727 -0.0532871 -0.404756
+-0.775212 0.482058 -0.0267007 -0.407374
+-0.80508 0.430324 -4.59286e-08 -0.408248
+-0.831501 0.376748 0.0267007 -0.407374
+-0.854361 0.321559 0.0532871 -0.404756
+-0.873563 0.264992 0.0796453 -0.400404
+-0.889024 0.207291 0.105662 -0.394338
+-0.900678 0.148703 0.131227 -0.386583
+-0.908475 0.089477 0.15623 -0.377172
+-0.912382 0.0298684 0.180564 -0.366147
+0.933521 0.0305603 -0.199846 0.296084
+0.929524 0.0915501 -0.218783 0.28238
+0.921546 0.152148 -0.236783 0.267466
+0.909622 0.212094 -0.253769 0.251407
+0.893803 0.271132 -0.269668 0.234272
+0.874156 0.329009 -0.284413 0.216133
+0.850766 0.385477 -0.29794 0.197069
+0.823733 0.440295 -0.310191 0.17716
+0.793173 0.493227 -0.321114 0.156494
+0.759216 0.544047 -0.330661 0.135157
+0.722008 0.592537 -0.338793 0.113241
+0.681709 0.63849 -0.345474 0.0908405
+0.63849 0.681709 -0.350675 0.068051
+0.592537 0.722008 -0.354375 0.04497
+0.544047 0.759216 -0.356558 0.0216964
+0.493227 0.793173 -0.357213 -0.00167001
+0.440295 0.823733 -0.356339 -0.0250293
+0.385477 0.850766 -0.353939 -0.0482814
+0.329009 0.874156 -0.350024 -0.0713268
+0.271132 0.893803 -0.344609 -0.0940667
+0.212094 0.909622 -0.337719 -0.116404
+0.152148 0.921546 -0.329383 -0.138243
+0.09155 0.929524 -0.319636 -0.159489
+0.0305602 0.933521 -0.308521 -0.180053
+-0.0305604 0.933521 -0.296084 -0.199846
+-0.0915502 0.929524 -0.28238 -0.218783
+-0.152148 0.921546 -0.267466 -0.236783
+-0.212094 0.909622 -0.251407 -0.253769
+-0.271132 0.893803 -0.234272 -0.269668
+-0.329009 0.874156 -0.216133 -0.284413
+-0.385477 0.850766 -0.197069 -0.29794
+-0.440295 0.823733 -0.17716 -0.310191
+-0.493227 0.793173 -0.156494 -0.321114
+-0.544047 0.759216 -0.135157 -0.330661
+-0.592537 0.722008 -0.113241 -0.338793
+-0.63849 0.681709 -0.0908405 -0.345474
+-0.681709 0.63849 -0.068051 -0.350675
+-0.722008 0.592537 -0.04497 -0.354375
+-0.759216 0.544047 -0.0216965 -0.356558
+-0.793173 0.493227 0.00166999 -0.357213
+-0.823733 0.440295 0.0250292 -0.356339
+-0.850766 0.385477 0.0482814 -0.353939
+-0.874156 0.329009 0.0713268 -0.350024
+-0.893803 0.271132 0.0940667 -0.344609
+-0.909622 0.212094 0.116404 -0.337719
+-0.921546 0.152148 0.138242 -0.329383
+-0.929524 0.0915501 0.159489 -0.319636
+-0.933521 0.0305604 0.180053 -0.308521
+0.88928 0.029112 -0.103646 0.444512
+0.885472 0.0872114 -0.132496 0.436782
+0.877872 0.144937 -0.160779 0.427181
+0.866513 0.202043 -0.188374 0.415751
+0.851444 0.258283 -0.215162 0.40254
+0.832728 0.313417 -0.241029 0.387606
+0.810447 0.367209 -0.265863 0.371012
+0.784695 0.419428 -0.28956 0.352829
+0.755583 0.469852 -0.312016 0.333136
+0.723236 0.518263 -0.333136 0.312016
+0.687791 0.564456 -0.352829 0.28956
+0.649401 0.608231 -0.371012 0.265863
+0.608231 0.649401 -0.387606 0.241029
+0.564456 0.687791 -0.40254 0.215162
+0.518263 0.723236 -0.415751 0.188374
+0.469852 0.755583 -0.427181 0.160779
+0.419428 0.784695 -0.436782 0.132496
+0.367209 0.810447 -0.444512 0.103646
+0.313417 0.832728 -0.450339 0.0743512
+0.258283 0.851444 -0.454238 0.0447385
+0.202043 0.866513 -0.456191 0.0149341
+0.144937 0.877872 -0.456191 -0.0149342
+0.0872113 0.885472 -0.454238 -0.0447386
+0.0291119 0.88928 -0.450339 -0.0743513
+-0.0291121 0.88928 -0.444512 -0.103646
+-0.0872115 0.885472 -0.436781 -0.132496
+-0.144937 0.877872 -0.427181 -0.160779
+-0.202043 0.866513 -0.415751 -0.188374
+-0.258283 0.851444 -0.40254 -0.215162
+-0.313417 0.832728 -0.387606 -0.241029
+-0.367209 0.810447 -0.371012 -0.265864
+-0.419428 0.784695 -0.352829 -0.28956
+-0.469852 0.755583 -0.333136 -0.312016
+-0.518263 0.723236 -0.312016 -0.333136
+-0.564456 0.687791 -0.28956 -0.352829
+-0.608231 0.649401 -0.265863 -0.371012
+-0.649401 0.608231 -0.241029 -0.387606
+-0.687791 0.564456 -0.215162 -0.40254
+-0.723236 0.518263 -0.188374 -0.415751
+-0.755583 0.469852 -0.160779 -0.427181
+-0.784695 0.419428 -0.132496 -0.436781
+-0.810447 0.367209 -0.103646 -0.444512
+-0.832728 0.313417 -0.0743512 -0.450339
+-0.851444 0.258283 -0.0447385 -0.454238
+-0.866513 0.202043 -0.0149341 -0.456191
+-0.877872 0.144937 0.0149341 -0.456191
+-0.885472 0.0872113 0.0447385 -0.454238
+-0.88928 0.0291121 0.0743512 -0.450339
+0.912382 0.0298683 -0.131227 0.386583
+0.908475 0.089477 -0.15623 0.377172
+0.900678 0.148703 -0.180564 0.366147
+0.889024 0.207291 -0.204124 0.353553
+0.873563 0.264992 -0.226811 0.339446
+0.854361 0.321559 -0.248526 0.323885
+0.831501 0.376748 -0.269177 0.306937
+0.80508 0.430324 -0.288675 0.288675
+0.775212 0.482058 -0.306937 0.269177
+0.742024 0.531727 -0.323885 0.248526
+0.705659 0.579119 -0.339446 0.226811
+0.666272 0.624032 -0.353553 0.204124
+0.624032 0.666272 -0.366147 0.180564
+0.579119 0.705659 -0.377172 0.15623
+0.531727 0.742024 -0.386583 0.131227
+0.482058 0.775212 -0.394338 0.105662
+0.430324 0.80508 -0.400404 0.0796453
+0.376748 0.831501 -0.404756 0.0532871
+0.321559 0.854361 -0.407374 0.0267007
+0.264992 0.873563 -0.408248 -1.75408e-08
+0.207291 0.889024 -0.407374 -0.0267007
+0.148702 0.900678 -0.404756 -0.0532871
+0.0894769 0.908475 -0.400404 -0.0796453
+0.0298682 0.912382 -0.394338 -0.105662
+-0.0298684 0.912382 -0.386583 -0.131227
+-0.0894771 0.908475 -0.377172 -0.15623
+-0.148703 0.900678 -0.366147 -0.180564
+-0.207291 0.889024 -0.353553 -0.204124
+-0.264993 0.873563 -0.339446 -0.226811
+-0.321559 0.854361 -0.323885 -0.248526
+-0.376748 0.831501 -0.306937 -0.269177
+-0.430324 0.80508 -0.288675 -0.288675
+-0.482058 0.775212 -0.269177 -0.306937
+-0.531727 0.742024 -0.248526 -0.323885
+-0.579119 0.705659 -0.226811 -0.339446
+-0.624032 0.666272 -0.204124 -0.353553
+-0.666272 0.624032 -0.180564 -0.366147
+-0.705659 0.579119 -0.15623 -0.377172
+-0.742024 0.531727 -0.131227 -0.386583
+-0.775212 0.482058 -0.105662 -0.394338
+-0.80508 0.430324 -0.0796453 -0.400404
+-0.831501 0.376748 -0.0532871 -0.404756
+-0.854361 0.321559 -0.0267007 -0.407374
+-0.873563 0.264992 -1.32811e-08 -0.408248
+-0.889024 0.207291 0.0267007 -0.407374
+-0.900678 0.148703 0.0532871 -0.404756
+-0.908475 0.089477 0.0796453 -0.400404
+-0.912382 0.0298684 0.105662 -0.394338
+0.912382 0.0298683 -0.0532871 0.404756
+0.908475 0.089477 -0.0796453 0.400404
+0.900678 0.148703 -0.105662 0.394338
+0.889024 0.207291 -0.131227 0.386583
+0.873563 0.264992 -0.15623 0.377172
+0.854361 0.321559 -0.180564 0.366147
+0.831501 0.376748 -0.204124 0.353553
+0.80508 0.430324 -0.226811 0.339446
+0.775212 0.482058 -0.248526 0.323885
+0.742024 0.531727 -0.269177 0.306937
+0.705659 0.579119 -0.288675 0.288675
+0.666272 0.624032 -0.306937 0.269177
+0.624032 0.666272 -0.323885 0.248526
+0.579119 0.705659 -0.339446 0.226811
+0.531727 0.742024 -0.353553 0.204124
+0.482058 0.775212 -0.366147 0.180564
+0.430324 0.80508 -0.377172 0.15623
+0.376748 0.831501 -0.386583 0.131227
+0.321559 0.854361 -0.394338 0.105662
+0.264992 0.873563 -0.400404 0.0796453
+0.207291 0.889024 -0.404756 0.0532871
+0.148702 0.900678 -0.407374 0.0267007
+0.0894769 0.908475 -0.408248 -5.01883e-08
+0.0298682 0.912382 -0.407374 -0.0267008
+-0.0298684 0.912382 -0.404756 -0.0532871
+-0.0894771 0.908475 -0.400404 -0.0796453
+-0.148703 0.900678 -0.394338 -0.105662
+-0.207291 0.889024 -0.386583 -0.131227
+-0.264993 0.873563 -0.377172 -0.15623
+-0.321559 0.854361 -0.366147 -0.180564
+-0.376748 0.831501 -0.353553 -0.204124
+-0.430324 0.80508 -0.339446 -0.226811
+-0.482058 0.775212 -0.323885 -0.248526
+-0.531727 0.742024 -0.306937 -0.269177
+-0.579119 0.705659 -0.288675 -0.288675
+-0.624032 0.666272 -0.269177 -0.306937
+-0.666272 0.624032 -0.248526 -0.323885
+-0.705659 0.579119 -0.226811 -0.339446
+-0.742024 0.531727 -0.204124 -0.353553
+-0.775212 0.482058 -0.180564 -0.366147
+-0.80508 0.430324 -0.15623 -0.377172
+-0.831501 0.376748 -0.131227 -0.386583
+-0.854361 0.321559 -0.105662 -0.394338
+-0.873563 0.264992 -0.0796453 -0.400404
+-0.889024 0.207291 -0.0532871 -0.404756
+-0.900678 0.148703 -0.0267007 -0.407374
+-0.908475 0.089477 1.93664e-08 -0.408248
+-0.912382 0.0298684 0.0267007 -0.407374
+0.933521 0.0305603 -0.0515886 0.353472
+0.929524 0.0915501 -0.0745963 0.349342
+0.921546 0.152148 -0.0972846 0.343715
+0.909622 0.212094 -0.119556 0.336616
+0.893803 0.271132 -0.141316 0.328076
+0.874156 0.329009 -0.162471 0.318131
+0.850766 0.385477 -0.18293 0.306824
+0.823733 0.440295 -0.202605 0.294203
+0.793173 0.493227 -0.221413 0.280322
+0.759216 0.544047 -0.239273 0.265241
+0.722008 0.592537 -0.256108 0.249023
+0.681709 0.63849 -0.271847 0.23174
+0.63849 0.681709 -0.286421 0.213464
+0.592537 0.722008 -0.299769 0.194274
+0.544047 0.759216 -0.311834 0.174252
+0.493227 0.793173 -0.322563 0.153484
+0.440295 0.823733 -0.33191 0.132059
+0.385477 0.850766 -0.339837 0.110068
+0.329009 0.874156 -0.346308 0.0876064
+0.271132 0.893803 -0.351296 0.0647692
+0.212094 0.909622 -0.35478 0.0416547
+0.152148 0.921546 -0.356745 0.0183617
+0.09155 0.929524 -0.357182 -0.00500984
+0.0305602 0.933521 -0.35609 -0.0283599
+-0.0305604 0.933521 -0.353472 -0.0515886
+-0.0915502 0.929524 -0.349342 -0.0745963
+-0.152148 0.921546 -0.343715 -0.0972847
+-0.212094 0.909622 -0.336616 -0.119556
+-0.271132 0.893803 -0.328076 -0.141316
+-0.329009 0.874156 -0.318131 -0.162471
+-0.385477 0.850766 -0.306824 -0.18293
+-0.440295 0.823733 -0.294203 -0.202605
+-0.493227 0.793173 -0.280322 -0.221413
+-0.544047 0.759216 -0.265241 -0.239273
+-0.592537 0.722008 -0.249023 -0.256108
+-0.63849 0.681709 -0.23174 -0.271847
+-0.681709 0.63849 -0.213464 -0.286421
+-0.722008 0.592537 -0.194274 -0.299769
+-0.759216 0.544047 -0.174252 -0.311834
+-0.793173 0.493227 -0.153484 -0.322563
+-0.823733 0.440295 -0.132059 -0.33191
+-0.850766 0.385477 -0.110069 -0.339837
+-0.874156 0.329009 -0.0876064 -0.346308
+-0.893803 0.271132 -0.0647693 -0.351296
+-0.909622 0.212094 -0.0416547 -0.35478
+-0.921546 0.152148 -0.0183618 -0.356745
+-0.929524 0.0915501 0.00500981 -0.357182
+-0.933521 0.0305604 0.0283599 -0.35609
+0.933521 0.0305603 -0.12895 0.333131
+0.929524 0.0915501 -0.150462 0.323984
+0.921546 0.152148 -0.171329 0.313449
+0.909622 0.212094 -0.191463 0.301573
+0.893803 0.271132 -0.210777 0.288405
+0.874156 0.329009 -0.229188 0.274002
+0.850766 0.385477 -0.246618 0.258425
+0.823733 0.440295 -0.262992 0.241743
+0.793173 0.493227 -0.278239 0.224025
+0.759216 0.544047 -0.292296 0.205347
+0.722008 0.592537 -0.3051 0.18579
+0.681709 0.63849 -0.316598 0.165438
+0.63849 0.681709 -0.32674 0.144377
+0.592537 0.722008 -0.335484 0.122698
+0.544047 0.759216 -0.34279 0.100494
+0.493227 0.793173 -0.348629 0.0778593
+0.440295 0.823733 -0.352975 0.0548912
+0.385477 0.850766 -0.355809 0.031688
+0.329009 0.874156 -0.35712 0.00834915
+0.271132 0.893803 -0.356901 -0.0150255
+0.212094 0.909622 -0.355154 -0.0383357
+0.152148 0.921546 -0.351887 -0.0614819
+0.09155 0.929524 -0.347112 -0.0843647
+0.0305602 0.933521 -0.340851 -0.106886
+-0.0305604 0.933521 -0.333131 -0.12895
+-0.0915502 0.929524 -0.323984 -0.150462
+-0.152148 0.921546 -0.313449 -0.171329
+-0.212094 0.909622 -0.301573 -0.191463
+-0.271132 0.893803 -0.288405 -0.210777
+-0.329009 0.874156 -0.274002 -0.229188
+-0.385477 0.850766 -0.258425 -0.246618
+-0.440295 0.823733 -0.241743 -0.262992
+-0.493227 0.793173 -0.224025 -0.278239
+-0.544047 0.759216 -0.205347 -0.292296
+-0.592537 0.722008 -0.18579 -0.3051
+-0.63849 0.681709 -0.165438 -0.316598
+-0.681709 0.63849 -0.144377 -0.32674
+-0.722008 0.592537 -0.122698 -0.335484
+-0.759216 0.544047 -0.100494 -0.34279
+-0.793173 0.493227 -0.0778593 -0.348629
+-0.823733 0.440295 -0.0548913 -0.352975
+-0.850766 0.385477 -0.031688 -0.355809
+-0.874156 0.329009 -0.00834914 -0.35712
+-0.893803 0.271132 0.0150254 -0.356901
+-0.909622 0.212094 0.0383358 -0.355154
+-0.921546 0.152148 0.0614818 -0.351887
+-0.929524 0.0915501 0.0843647 -0.347112
+-0.933521 0.0305604 0.106886 -0.340851
+0.951462 0.0311476 -0.126365 0.278894
+0.947388 0.0933095 -0.144335 0.270032
+0.939256 0.155072 -0.161687 0.260014
+0.927103 0.21617 -0.178347 0.248882
+0.91098 0.276343 -0.194242 0.236685
+0.890956 0.335332 -0.209307 0.223474
+0.867117 0.392885 -0.223474 0.209307
+0.839564 0.448756 -0.236685 0.194242
+0.808416 0.502706 -0.248882 0.178347
+0.773807 0.554502 -0.260014 0.161687
+0.735884 0.603924 -0.270032 0.144335
+0.69481 0.650761 -0.278894 0.126365
+0.65076 0.69481 -0.286561 0.107854
+0.603924 0.735884 -0.293002 0.0888812
+0.554502 0.773807 -0.298188 0.0695276
+0.502706 0.808416 -0.302097 0.0498763
+0.448756 0.839564 -0.304712 0.0300115
+0.392885 0.867117 -0.306022 0.0100181
+0.335332 0.890956 -0.306022 -0.0100181
+0.276343 0.91098 -0.304712 -0.0300115
+0.21617 0.927103 -0.302097 -0.0498764
+0.155072 0.939256 -0.298188 -0.0695277
+0.0933094 0.947388 -0.293002 -0.0888812
+0.0311475 0.951462 -0.286561 -0.107854
+-0.0311477 0.951462 -0.278894 -0.126365
+-0.0933096 0.947388 -0.270032 -0.144335
+-0.155072 0.939256 -0.260014 -0.161687
+-0.21617 0.927103 -0.248882 -0.178347
+-0.276343 0.91098 -0.236685 -0.194243
+-0.335332 0.890956 -0.223474 -0.209307
+-0.392886 0.867116 -0.209307 -0.223474
+-0.448756 0.839564 -0.194243 -0.236685
+-0.502706 0.808416 -0.178347 -0.248882
+-0.554502 0.773807 -0.161687 -0.260014
+-0.603924 0.735884 -0.144335 -0.270032
+-0.650761 0.69481 -0.126365 -0.278894
+-0.69481 0.650761 -0.107854 -0.286561
+-0.735884 0.603924 -0.0888812 -0.293002
+-0.773807 0.554502 -0.0695276 -0.298188
+-0.808416 0.502706 -0.0498763 -0.302097
+-0.839564 0.448756 -0.0300115 -0.304712
+-0.867117 0.392885 -0.0100181 -0.306022
+-0.890956 0.335332 0.0100182 -0.306022
+-0.91098 0.276343 0.0300115 -0.304712
+-0.927103 0.21617 0.0498764 -0.302097
+-0.939256 0.155072 0.0695276 -0.298188
+-0.947388 0.0933095 0.0888812 -0.293002
+-0.951462 0.0311477 0.107854 -0.286562
+0.951462 0.0311476 -0.0498763 0.302097
+0.947388 0.0933095 -0.0695276 0.298188
+0.939256 0.155072 -0.0888812 0.293002
+0.927103 0.21617 -0.107854 0.286561
+0.91098 0.276343 -0.126365 0.278894
+0.890956 0.335332 -0.144335 0.270032
+0.867117 0.392885 -0.161687 0.260014
+0.839564 0.448756 -0.178347 0.248882
+0.808416 0.502706 -0.194242 0.236685
+0.773807 0.554502 -0.209307 0.223474
+0.735884 0.603924 -0.223474 0.209307
+0.69481 0.650761 -0.236685 0.194242
+0.65076 0.69481 -0.248882 0.178347
+0.603924 0.735884 -0.260014 0.161687
+0.554502 0.773807 -0.270032 0.144335
+0.502706 0.808416 -0.278894 0.126365
+0.448756 0.839564 -0.286562 0.107854
+0.392885 0.867117 -0.293002 0.0888811
+0.335332 0.890956 -0.298188 0.0695276
+0.276343 0.91098 -0.302097 0.0498763
+0.21617 0.927103 -0.304712 0.0300115
+0.155072 0.939256 -0.306022 0.0100181
+0.0933094 0.947388 -0.306022 -0.0100182
+0.0311475 0.951462 -0.304712 -0.0300115
+-0.0311477 0.951462 -0.302097 -0.0498764
+-0.0933096 0.947388 -0.298188 -0.0695276
+-0.155072 0.939256 -0.293002 -0.0888812
+-0.21617 0.927103 -0.286561 -0.107854
+-0.276343 0.91098 -0.278894 -0.126365
+-0.335332 0.890956 -0.270032 -0.144335
+-0.392886 0.867116 -0.260014 -0.161687
+-0.448756 0.839564 -0.248882 -0.178347
+-0.502706 0.808416 -0.236685 -0.194242
+-0.554502 0.773807 -0.223474 -0.209307
+-0.603924 0.735884 -0.209307 -0.223474
+-0.650761 0.69481 -0.194242 -0.236685
+-0.69481 0.650761 -0.178347 -0.248882
+-0.735884 0.603924 -0.161687 -0.260014
+-0.773807 0.554502 -0.144335 -0.270032
+-0.808416 0.502706 -0.126365 -0.278894
+-0.839564 0.448756 -0.107854 -0.286561
+-0.867117 0.392885 -0.0888812 -0.293002
+-0.890956 0.335332 -0.0695276 -0.298188
+-0.91098 0.276343 -0.0498764 -0.302097
+-0.927103 0.21617 -0.0300115 -0.304712
+-0.939256 0.155072 -0.0100182 -0.306022
+-0.947388 0.0933095 0.0100181 -0.306022
+-0.951462 0.0311477 0.0300115 -0.304712
+0.966382 0.0316361 -0.0481394 0.250573
+0.962244 0.0947728 -0.0644245 0.246888
+0.953986 0.157504 -0.0804338 0.242146
+0.941642 0.21956 -0.0960987 0.236367
+0.925266 0.280676 -0.111352 0.229575
+0.904928 0.340591 -0.126129 0.221801
+0.880714 0.399046 -0.140365 0.213077
+0.85273 0.455794 -0.154 0.20344
+0.821094 0.510589 -0.166976 0.192933
+0.785942 0.563198 -0.179237 0.181599
+0.747424 0.613395 -0.190731 0.169487
+0.705706 0.660965 -0.201407 0.15665
+0.660965 0.705706 -0.211221 0.143142
+0.613395 0.747424 -0.220131 0.129021
+0.563198 0.785942 -0.228098 0.114348
+0.510589 0.821094 -0.235089 0.0991844
+0.455793 0.85273 -0.241072 0.0835965
+0.399046 0.880714 -0.246023 0.0676507
+0.340591 0.904928 -0.249921 0.0514151
+0.280676 0.925266 -0.252749 0.0349594
+0.21956 0.941642 -0.254494 0.018354
+0.157504 0.953986 -0.25515 0.00166994
+0.0947727 0.962244 -0.254713 -0.0150212
+0.031636 0.966382 -0.253185 -0.0316481
+-0.0316362 0.966382 -0.250573 -0.0481394
+-0.0947729 0.962244 -0.246888 -0.0644246
+-0.157504 0.953986 -0.242146 -0.0804339
+-0.21956 0.941642 -0.236367 -0.0960987
+-0.280676 0.925266 -0.229575 -0.111352
+-0.340591 0.904927 -0.221801 -0.126129
+-0.399047 0.880714 -0.213077 -0.140365
+-0.455793 0.85273 -0.203441 -0.154
+-0.510589 0.821094 -0.192933 -0.166976
+-0.563198 0.785941 -0.181599 -0.179237
+-0.613395 0.747424 -0.169487 -0.190731
+-0.660966 0.705706 -0.15665 -0.201407
+-0.705706 0.660966 -0.143142 -0.211221
+-0.747424 0.613395 -0.129021 -0.220131
+-0.785942 0.563198 -0.114348 -0.228098
+-0.821094 0.510589 -0.0991844 -0.235089
+-0.85273 0.455794 -0.0835966 -0.241072
+-0.880714 0.399046 -0.0676507 -0.246023
+-0.904928 0.340591 -0.0514151 -0.249921
+-0.925266 0.280676 -0.0349594 -0.252749
+-0.941642 0.21956 -0.018354 -0.254494
+-0.953986 0.157504 -0.00166999 -0.25515
+-0.962244 0.0947727 0.0150212 -0.254713
+-0.966382 0.0316362 0.031648 -0.253185
+0.933521 0.0305603 -0.26072 0.244191
+0.929524 0.0915501 -0.276133 0.226616
+0.921546 0.152148 -0.290363 0.208071
+0.909622 0.212094 -0.30335 0.188635
+0.893803 0.271132 -0.315037 0.168391
+0.874156 0.329009 -0.325376 0.147426
+0.850766 0.385477 -0.334322 0.12583
+0.823733 0.440295 -0.341836 0.103695
+0.793173 0.493227 -0.347886 0.0811156
+0.759216 0.544047 -0.352446 0.0581891
+0.722008 0.592537 -0.355497 0.0350134
+0.681709 0.63849 -0.357026 0.0116878
+0.63849 0.681709 -0.357026 -0.0116878
+0.592537 0.722008 -0.355497 -0.0350134
+0.544047 0.759216 -0.352446 -0.0581891
+0.493227 0.793173 -0.347886 -0.0811156
+0.440295 0.823733 -0.341836 -0.103695
+0.385477 0.850766 -0.334322 -0.12583
+0.329009 0.874156 -0.325376 -0.147426
+0.271132 0.893803 -0.315037 -0.168391
+0.212094 0.909622 -0.30335 -0.188635
+0.152148 0.921546 -0.290363 -0.208071
+0.09155 0.929524 -0.276133 -0.226616
+0.0305602 0.933521 -0.26072 -0.244191
+-0.0305604 0.933521 -0.244191 -0.26072
+-0.0915502 0.929524 -0.226616 -0.276133
+-0.152148 0.921546 -0.208071 -0.290363
+-0.212094 0.909622 -0.188635 -0.30335
+-0.271132 0.893803 -0.168391 -0.315038
+-0.329009 0.874156 -0.147426 -0.325376
+-0.385477 0.850766 -0.12583 -0.334322
+-0.440295 0.823733 -0.103695 -0.341836
+-0.493227 0.793173 -0.0811156 -0.347886
+-0.544047 0.759216 -0.058189 -0.352446
+-0.592537 0.722008 -0.0350134 -0.355497
+-0.63849 0.681709 -0.0116878 -0.357026
+-0.681709 0.63849 0.0116878 -0.357026
+-0.722008 0.592537 0.0350134 -0.355497
+-0.759216 0.544047 0.058189 -0.352446
+-0.793173 0.493227 0.0811156 -0.347886
+-0.823733 0.440295 0.103695 -0.341836
+-0.850766 0.385477 0.12583 -0.334322
+-0.874156 0.329009 0.147426 -0.325376
+-0.893803 0.271132 0.168391 -0.315037
+-0.909622 0.212094 0.188635 -0.30335
+-0.921546 0.152148 0.208071 -0.290363
+-0.929524 0.0915501 0.226616 -0.276133
+-0.933521 0.0305604 0.244191 -0.26072
+0.951462 0.0311476 -0.248882 0.178347
+0.947388 0.0933095 -0.260014 0.161687
+0.939256 0.155072 -0.270032 0.144335
+0.927103 0.21617 -0.278894 0.126365
+0.91098 0.276343 -0.286561 0.107854
+0.890956 0.335332 -0.293002 0.0888812
+0.867117 0.392885 -0.298188 0.0695276
+0.839564 0.448756 -0.302097 0.0498763
+0.808416 0.502706 -0.304712 0.0300115
+0.773807 0.554502 -0.306022 0.0100181
+0.735884 0.603924 -0.306022 -0.0100181
+0.69481 0.650761 -0.304712 -0.0300115
+0.65076 0.69481 -0.302097 -0.0498764
+0.603924 0.735884 -0.298188 -0.0695276
+0.554502 0.773807 -0.293002 -0.0888812
+0.502706 0.808416 -0.286561 -0.107854
+0.448756 0.839564 -0.278894 -0.126365
+0.392885 0.867117 -0.270032 -0.144335
+0.335332 0.890956 -0.260014 -0.161687
+0.276343 0.91098 -0.248882 -0.178347
+0.21617 0.927103 -0.236685 -0.194242
+0.155072 0.939256 -0.223474 -0.209307
+0.0933094 0.947388 -0.209307 -0.223474
+0.0311475 0.951462 -0.194242 -0.236685
+-0.0311477 0.951462 -0.178347 -0.248882
+-0.0933096 0.947388 -0.161687 -0.260014
+-0.155072 0.939256 -0.144335 -0.270032
+-0.21617 0.927103 -0.126365 -0.278894
+-0.276343 0.91098 -0.107854 -0.286562
+-0.335332 0.890956 -0.0888811 -0.293002
+-0.392886 0.867116 -0.0695276 -0.298188
+-0.448756 0.839564 -0.0498764 -0.302097
+-0.502706 0.808416 -0.0300115 -0.304712
+-0.554502 0.773807 -0.0100181 -0.306022
+-0.603924 0.735884 0.0100181 -0.306022
+-0.650761 0.69481 0.0300115 -0.304712
+-0.69481 0.650761 0.0498763 -0.302097
+-0.735884 0.603924 0.0695276 -0.298188
+-0.773807 0.554502 0.0888811 -0.293002
+-0.808416 0.502706 0.107854 -0.286561
+-0.839564 0.448756 0.126365 -0.278894
+-0.867117 0.392885 0.144335 -0.270032
+-0.890956 0.335332 0.161687 -0.260014
+-0.91098 0.276343 0.178347 -0.248882
+-0.927103 0.21617 0.194242 -0.236685
+-0.939256 0.155072 0.209307 -0.223474
+-0.947388 0.0933095 0.223474 -0.209307
+-0.951462 0.0311477 0.236685 -0.194243
+0.951462 0.0311476 -0.194242 0.236685
+0.947388 0.0933095 -0.209307 0.223474
+0.939256 0.155072 -0.223474 0.209307
+0.927103 0.21617 -0.236685 0.194242
+0.91098 0.276343 -0.248882 0.178347
+0.890956 0.335332 -0.260014 0.161687
+0.867117 0.392885 -0.270032 0.144335
+0.839564 0.448756 -0.278894 0.126365
+0.808416 0.502706 -0.286561 0.107854
+0.773807 0.554502 -0.293002 0.0888812
+0.735884 0.603924 -0.298188 0.0695276
+0.69481 0.650761 -0.302097 0.0498763
+0.65076 0.69481 -0.304712 0.0300115
+0.603924 0.735884 -0.306022 0.0100181
+0.554502 0.773807 -0.306022 -0.0100181
+0.502706 0.808416 -0.304712 -0.0300115
+0.448756 0.839564 -0.302097 -0.0498764
+0.392885 0.867117 -0.298188 -0.0695276
+0.335332 0.890956 -0.293002 -0.0888812
+0.276343 0.91098 -0.286561 -0.107854
+0.21617 0.927103 -0.278894 -0.126365
+0.155072 0.939256 -0.270032 -0.144335
+0.0933094 0.947388 -0.260014 -0.161687
+0.0311475 0.951462 -0.248882 -0.178347
+-0.0311477 0.951462 -0.236685 -0.194243
+-0.0933096 0.947388 -0.223474 -0.209307
+-0.155072 0.939256 -0.209307 -0.223474
+-0.21617 0.927103 -0.194242 -0.236685
+-0.276343 0.91098 -0.178347 -0.248882
+-0.335332 0.890956 -0.161687 -0.260014
+-0.392886 0.867116 -0.144335 -0.270032
+-0.448756 0.839564 -0.126365 -0.278894
+-0.502706 0.808416 -0.107854 -0.286561
+-0.554502 0.773807 -0.0888811 -0.293002
+-0.603924 0.735884 -0.0695276 -0.298188
+-0.650761 0.69481 -0.0498763 -0.302097
+-0.69481 0.650761 -0.0300115 -0.304712
+-0.735884 0.603924 -0.0100181 -0.306022
+-0.773807 0.554502 0.0100181 -0.306022
+-0.808416 0.502706 0.0300115 -0.304712
+-0.839564 0.448756 0.0498763 -0.302097
+-0.867117 0.392885 0.0695276 -0.298188
+-0.890956 0.335332 0.0888812 -0.293002
+-0.91098 0.276343 0.107854 -0.286561
+-0.927103 0.21617 0.126365 -0.278894
+-0.939256 0.155072 0.144335 -0.270032
+-0.947388 0.0933095 0.161687 -0.260014
+-0.951462 0.0311477 0.178347 -0.248882
+0.966382 0.0316361 -0.186229 0.174422
+0.962244 0.0947728 -0.197238 0.161869
+0.953986 0.157504 -0.207402 0.148622
+0.941642 0.21956 -0.216678 0.134739
+0.925266 0.280676 -0.225027 0.120279
+0.904928 0.340591 -0.232412 0.105304
+0.880714 0.399046 -0.238801 0.0898784
+0.85273 0.455794 -0.244168 0.0740676
+0.821094 0.510589 -0.24849 0.0579397
+0.785942 0.563198 -0.251747 0.0415636
+0.747424 0.613395 -0.253927 0.0250096
+0.705706 0.660965 -0.255019 0.00834844
+0.660965 0.705706 -0.255019 -0.00834845
+0.613395 0.747424 -0.253927 -0.0250096
+0.563198 0.785942 -0.251747 -0.0415636
+0.510589 0.821094 -0.24849 -0.0579397
+0.455793 0.85273 -0.244168 -0.0740677
+0.399046 0.880714 -0.238801 -0.0898784
+0.340591 0.904928 -0.232412 -0.105304
+0.280676 0.925266 -0.225027 -0.120279
+0.21956 0.941642 -0.216678 -0.134739
+0.157504 0.953986 -0.207402 -0.148622
+0.0947727 0.962244 -0.197238 -0.161869
+0.031636 0.966382 -0.186229 -0.174422
+-0.0316362 0.966382 -0.174422 -0.186229
+-0.0947729 0.962244 -0.161869 -0.197238
+-0.157504 0.953986 -0.148622 -0.207402
+-0.21956 0.941642 -0.134739 -0.216678
+-0.280676 0.925266 -0.120279 -0.225027
+-0.340591 0.904927 -0.105304 -0.232412
+-0.399047 0.880714 -0.0898784 -0.238801
+-0.455793 0.85273 -0.0740677 -0.244168
+-0.510589 0.821094 -0.0579397 -0.24849
+-0.563198 0.785941 -0.0415636 -0.251747
+-0.613395 0.747424 -0.0250096 -0.253927
+-0.660966 0.705706 -0.00834843 -0.255019
+-0.705706 0.660966 0.00834843 -0.255019
+-0.747424 0.613395 0.0250096 -0.253927
+-0.785942 0.563198 0.0415636 -0.251747
+-0.821094 0.510589 0.0579397 -0.24849
+-0.85273 0.455794 0.0740676 -0.244168
+-0.880714 0.399046 0.0898784 -0.238801
+-0.904928 0.340591 0.105304 -0.232412
+-0.925266 0.280676 0.120279 -0.225027
+-0.941642 0.21956 0.134739 -0.216678
+-0.953986 0.157504 0.148622 -0.207402
+-0.962244 0.0947727 0.161869 -0.197238
+-0.966382 0.0316362 0.174422 -0.186229
+0.966382 0.0316361 -0.231013 0.108337
+0.962244 0.0947728 -0.237604 0.0929965
+0.953986 0.157504 -0.243178 0.0772574
+0.941642 0.21956 -0.24771 0.0611873
+0.925266 0.280676 -0.251182 0.0448553
+0.904928 0.340591 -0.253577 0.0283312
+0.880714 0.399046 -0.254887 0.0116858
+0.85273 0.455794 -0.255106 -0.00500964
+0.821094 0.510589 -0.254232 -0.0216836
+0.785942 0.563198 -0.25227 -0.0382648
+0.747424 0.613395 -0.249227 -0.0546821
+0.705706 0.660965 -0.245117 -0.0708652
+0.660965 0.705706 -0.239957 -0.0867449
+0.613395 0.747424 -0.23377 -0.102253
+0.563198 0.785942 -0.226582 -0.117324
+0.510589 0.821094 -0.218423 -0.131891
+0.455793 0.85273 -0.20933 -0.145895
+0.399046 0.880714 -0.19934 -0.159273
+0.340591 0.904928 -0.188496 -0.17197
+0.280676 0.925266 -0.176845 -0.18393
+0.21956 0.941642 -0.164437 -0.195102
+0.157504 0.953986 -0.151324 -0.205439
+0.0947727 0.962244 -0.137564 -0.214896
+0.031636 0.966382 -0.123215 -0.223433
+-0.0316362 0.966382 -0.108337 -0.231013
+-0.0947729 0.962244 -0.0929965 -0.237604
+-0.157504 0.953986 -0.0772573 -0.243178
+-0.21956 0.941642 -0.0611873 -0.24771
+-0.280676 0.925266 -0.0448553 -0.251182
+-0.340591 0.904927 -0.0283312 -0.253577
+-0.399047 0.880714 -0.0116858 -0.254887
+-0.455793 0.85273 0.00500961 -0.255106
+-0.510589 0.821094 0.0216836 -0.254232
+-0.563198 0.785941 0.0382648 -0.25227
+-0.613395 0.747424 0.0546821 -0.249227
+-0.660966 0.705706 0.0708652 -0.245117
+-0.705706 0.660966 0.0867449 -0.239957
+-0.747424 0.613395 0.102253 -0.23377
+-0.785942 0.563198 0.117323 -0.226582
+-0.821094 0.510589 0.131891 -0.218423
+-0.85273 0.455794 0.145895 -0.20933
+-0.880714 0.399046 0.159273 -0.19934
+-0.904928 0.340591 0.17197 -0.188496
+-0.925266 0.280676 0.18393 -0.176845
+-0.941642 0.21956 0.195102 -0.164437
+-0.953986 0.157504 0.205439 -0.151324
+-0.962244 0.0947727 0.214896 -0.137564
+-0.966382 0.0316362 0.223433 -0.123215
+0.978421 0.0320302 -0.201398 0.0332509
+0.974231 0.0959534 -0.203141 0.0200077
+0.96587 0.159466 -0.204015 0.00667876
+0.953372 0.222295 -0.204015 -0.00667876
+0.936792 0.284173 -0.203141 -0.0200077
+0.9162 0.344833 -0.201398 -0.0332509
+0.891686 0.404017 -0.198792 -0.0463517
+0.863352 0.461472 -0.195335 -0.0592541
+0.831322 0.516949 -0.191041 -0.0719027
+0.795732 0.570214 -0.185929 -0.0842435
+0.756735 0.621036 -0.180021 -0.0962235
+0.714497 0.669199 -0.173343 -0.107791
+0.669199 0.714497 -0.165922 -0.118898
+0.621036 0.756735 -0.15779 -0.129495
+0.570214 0.795732 -0.148983 -0.139538
+0.516949 0.831322 -0.139538 -0.148983
+0.461471 0.863352 -0.129495 -0.15779
+0.404017 0.891686 -0.118898 -0.165922
+0.344833 0.9162 -0.107791 -0.173343
+0.284173 0.936792 -0.0962234 -0.180021
+0.222295 0.953372 -0.0842435 -0.185929
+0.159466 0.96587 -0.0719027 -0.191041
+0.0959533 0.974231 -0.0592541 -0.195335
+0.0320301 0.978421 -0.0463517 -0.198792
+-0.0320303 0.978421 -0.0332509 -0.201398
+-0.0959535 0.974231 -0.0200076 -0.203141
+-0.159466 0.96587 -0.00667874 -0.204015
+-0.222295 0.953372 0.00667877 -0.204015
+-0.284173 0.936792 0.0200077 -0.203141
+-0.344834 0.9162 0.0332509 -0.201398
+-0.404018 0.891686 0.0463518 -0.198792
+-0.461471 0.863352 0.0592541 -0.195335
+-0.516949 0.831322 0.0719027 -0.191041
+-0.570214 0.795732 0.0842435 -0.185929
+-0.621036 0.756735 0.0962234 -0.180021
+-0.669199 0.714497 0.107791 -0.173343
+-0.714497 0.669199 0.118898 -0.165922
+-0.756735 0.621036 0.129495 -0.15779
+-0.795732 0.570214 0.139538 -0.148983
+-0.831322 0.516949 0.148983 -0.139538
+-0.863352 0.461472 0.15779 -0.129495
+-0.891686 0.404017 0.165922 -0.118898
+-0.9162 0.344833 0.173343 -0.107791
+-0.936792 0.284173 0.180021 -0.0962235
+-0.953372 0.222295 0.185929 -0.0842435
+-0.96587 0.159466 0.191041 -0.0719028
+-0.974231 0.0959533 0.195335 -0.0592541
+-0.978421 0.0320303 0.198792 -0.0463518
+0.978421 0.0320302 -0.173343 0.107791
+0.974231 0.0959534 -0.180021 0.0962235
+0.96587 0.159466 -0.185929 0.0842435
+0.953372 0.222295 -0.191041 0.0719027
+0.936792 0.284173 -0.195335 0.0592541
+0.9162 0.344833 -0.198792 0.0463518
+0.891686 0.404017 -0.201398 0.0332509
+0.863352 0.461472 -0.203141 0.0200077
+0.831322 0.516949 -0.204015 0.00667875
+0.795732 0.570214 -0.204015 -0.00667875
+0.756735 0.621036 -0.203141 -0.0200077
+0.714497 0.669199 -0.201398 -0.0332509
+0.669199 0.714497 -0.198792 -0.0463518
+0.621036 0.756735 -0.195335 -0.0592541
+0.570214 0.795732 -0.191041 -0.0719027
+0.516949 0.831322 -0.185929 -0.0842435
+0.461471 0.863352 -0.180021 -0.0962235
+0.404017 0.891686 -0.173343 -0.107791
+0.344833 0.9162 -0.165922 -0.118898
+0.284173 0.936792 -0.15779 -0.129495
+0.222295 0.953372 -0.148983 -0.139538
+0.159466 0.96587 -0.139538 -0.148983
+0.0959533 0.974231 -0.129495 -0.15779
+0.0320301 0.978421 -0.118898 -0.165922
+-0.0320303 0.978421 -0.107791 -0.173343
+-0.0959535 0.974231 -0.0962234 -0.180021
+-0.159466 0.96587 -0.0842435 -0.185929
+-0.222295 0.953372 -0.0719027 -0.191041
+-0.284173 0.936792 -0.0592541 -0.195335
+-0.344834 0.9162 -0.0463517 -0.198792
+-0.404018 0.891686 -0.0332509 -0.201398
+-0.461471 0.863352 -0.0200077 -0.203141
+-0.516949 0.831322 -0.00667876 -0.204015
+-0.570214 0.795732 0.00667877 -0.204015
+-0.621036 0.756735 0.0200077 -0.203141
+-0.669199 0.714497 0.0332509 -0.201398
+-0.714497 0.669199 0.0463517 -0.198792
+-0.756735 0.621036 0.0592541 -0.195335
+-0.795732 0.570214 0.0719027 -0.191041
+-0.831322 0.516949 0.0842435 -0.185929
+-0.863352 0.461472 0.0962234 -0.180021
+-0.891686 0.404017 0.107791 -0.173343
+-0.9162 0.344833 0.118898 -0.165922
+-0.936792 0.284173 0.129495 -0.15779
+-0.953372 0.222295 0.139538 -0.148983
+-0.96587 0.159466 0.148983 -0.139538
+-0.974231 0.0959533 0.15779 -0.129495
+-0.978421 0.0320303 0.165922 -0.118898
+0.987683 0.0323334 -0.149094 0.0347638
+0.983453 0.0968617 -0.151048 0.0249382
+0.975013 0.160975 -0.152356 0.0150057
+0.962397 0.224399 -0.153011 0.00500906
+0.94566 0.286863 -0.153011 -0.00500907
+0.924873 0.348098 -0.152356 -0.0150057
+0.900126 0.407842 -0.151048 -0.0249382
+0.871525 0.46584 -0.149094 -0.0347638
+0.839192 0.521843 -0.146501 -0.0444406
+0.803265 0.575611 -0.143281 -0.0539271
+0.763898 0.626915 -0.139447 -0.0631826
+0.72126 0.675534 -0.135016 -0.0721676
+0.675534 0.72126 -0.130007 -0.0808436
+0.626915 0.763898 -0.124441 -0.0891733
+0.575611 0.803265 -0.118343 -0.0971212
+0.521843 0.839192 -0.111737 -0.104653
+0.46584 0.871525 -0.104653 -0.111737
+0.407842 0.900126 -0.0971212 -0.118343
+0.348098 0.924873 -0.0891733 -0.124441
+0.286863 0.94566 -0.0808435 -0.130007
+0.224399 0.962397 -0.0721676 -0.135016
+0.160975 0.975013 -0.0631826 -0.139447
+0.0968616 0.983453 -0.053927 -0.143281
+0.0323333 0.987683 -0.0444406 -0.146501
+-0.0323335 0.987683 -0.0347638 -0.149094
+-0.0968618 0.983453 -0.0249382 -0.151048
+-0.160975 0.975013 -0.0150057 -0.152356
+-0.224399 0.962397 -0.00500906 -0.153011
+-0.286863 0.94566 0.00500909 -0.153011
+-0.348098 0.924873 0.0150058 -0.152356
+-0.407842 0.900126 0.0249382 -0.151048
+-0.46584 0.871525 0.0347638 -0.149094
+-0.521843 0.839192 0.0444406 -0.146501
+-0.575611 0.803265 0.0539271 -0.143281
+-0.626915 0.763898 0.0631826 -0.139447
+-0.675534 0.72126 0.0721676 -0.135016
+-0.72126 0.675534 0.0808435 -0.130007
+-0.763898 0.626915 0.0891733 -0.124441
+-0.803265 0.575611 0.0971212 -0.118343
+-0.839192 0.521843 0.104653 -0.111737
+-0.871525 0.46584 0.111737 -0.104653
+-0.900126 0.407842 0.118343 -0.0971212
+-0.924873 0.348098 0.124441 -0.0891733
+-0.94566 0.286863 0.130007 -0.0808436
+-0.962397 0.224399 0.135016 -0.0721676
+-0.975013 0.160975 0.139447 -0.0631826
+-0.983453 0.0968616 0.143281 -0.053927
+-0.987683 0.0323335 0.146501 -0.0444406
+0.966382 0.0316361 -0.123215 0.223433
+0.962244 0.0947728 -0.137564 0.214896
+0.953986 0.157504 -0.151324 0.205439
+0.941642 0.21956 -0.164437 0.195102
+0.925266 0.280676 -0.176845 0.18393
+0.904928 0.340591 -0.188496 0.17197
+0.880714 0.399046 -0.19934 0.159273
+0.85273 0.455794 -0.20933 0.145895
+0.821094 0.510589 -0.218423 0.131891
+0.785942 0.563198 -0.226582 0.117324
+0.747424 0.613395 -0.23377 0.102253
+0.705706 0.660965 -0.239957 0.0867449
+0.660965 0.705706 -0.245117 0.0708652
+0.613395 0.747424 -0.249227 0.0546821
+0.563198 0.785942 -0.25227 0.0382648
+0.510589 0.821094 -0.254232 0.0216836
+0.455793 0.85273 -0.255106 0.00500962
+0.399046 0.880714 -0.254887 -0.0116858
+0.340591 0.904928 -0.253577 -0.0283312
+0.280676 0.925266 -0.251182 -0.0448553
+0.21956 0.941642 -0.24771 -0.0611874
+0.157504 0.953986 -0.243178 -0.0772574
+0.0947727 0.962244 -0.237604 -0.0929966
+0.031636 0.966382 -0.231013 -0.108338
+-0.0316362 0.966382 -0.223433 -0.123215
+-0.0947729 0.962244 -0.214896 -0.137564
+-0.157504 0.953986 -0.205439 -0.151324
+-0.21956 0.941642 -0.195102 -0.164437
+-0.280676 0.925266 -0.18393 -0.176845
+-0.340591 0.904927 -0.171969 -0.188496
+-0.399047 0.880714 -0.159273 -0.19934
+-0.455793 0.85273 -0.145895 -0.20933
+-0.510589 0.821094 -0.131891 -0.218423
+-0.563198 0.785941 -0.117323 -0.226582
+-0.613395 0.747424 -0.102253 -0.23377
+-0.660966 0.705706 -0.0867449 -0.239957
+-0.705706 0.660966 -0.0708652 -0.245117
+-0.747424 0.613395 -0.0546821 -0.249227
+-0.785942 0.563198 -0.0382648 -0.25227
+-0.821094 0.510589 -0.0216836 -0.254232
+-0.85273 0.455794 -0.00500967 -0.255106
+-0.880714 0.399046 0.0116858 -0.254887
+-0.904928 0.340591 0.0283313 -0.253577
+-0.925266 0.280676 0.0448553 -0.251182
+-0.941642 0.21956 0.0611874 -0.24771
+-0.953986 0.157504 0.0772573 -0.243178
+-0.962244 0.0947727 0.0929965 -0.237604
+-0.966382 0.0316362 0.108337 -0.231013
+0.978421 0.0320302 -0.118898 0.165922
+0.974231 0.0959534 -0.129495 0.15779
+0.96587 0.159466 -0.139538 0.148983
+0.953372 0.222295 -0.148983 0.139538
+0.936792 0.284173 -0.15779 0.129495
+0.9162 0.344833 -0.165922 0.118898
+0.891686 0.404017 -0.173343 0.107791
+0.863352 0.461472 -0.180021 0.0962235
+0.831322 0.516949 -0.185929 0.0842435
+0.795732 0.570214 -0.191041 0.0719027
+0.756735 0.621036 -0.195335 0.0592541
+0.714497 0.669199 -0.198792 0.0463517
+0.669199 0.714497 -0.201398 0.0332509
+0.621036 0.756735 -0.203141 0.0200077
+0.570214 0.795732 -0.204015 0.00667874
+0.516949 0.831322 -0.204015 -0.00667877
+0.461471 0.863352 -0.203141 -0.0200077
+0.404017 0.891686 -0.201398 -0.0332509
+0.344833 0.9162 -0.198792 -0.0463518
+0.284173 0.936792 -0.195335 -0.0592541
+0.222295 0.953372 -0.191041 -0.0719027
+0.159466 0.96587 -0.185929 -0.0842435
+0.0959533 0.974231 -0.180021 -0.0962235
+0.0320301 0.978421 -0.173343 -0.107791
+-0.0320303 0.978421 -0.165922 -0.118898
+-0.0959535 0.974231 -0.15779 -0.129495
+-0.159466 0.96587 -0.148983 -0.139538
+-0.222295 0.953372 -0.139538 -0.148983
+-0.284173 0.936792 -0.129495 -0.15779
+-0.344834 0.9162 -0.118898 -0.165922
+-0.404018 0.891686 -0.107791 -0.173343
+-0.461471 0.863352 -0.0962235 -0.180021
+-0.516949 0.831322 -0.0842435 -0.185929
+-0.570214 0.795732 -0.0719027 -0.191041
+-0.621036 0.756735 -0.0592541 -0.195335
+-0.669199 0.714497 -0.0463517 -0.198792
+-0.714497 0.669199 -0.0332509 -0.201398
+-0.756735 0.621036 -0.0200077 -0.203141
+-0.795732 0.570214 -0.00667877 -0.204015
+-0.831322 0.516949 0.00667876 -0.204015
+-0.863352 0.461472 0.0200076 -0.203141
+-0.891686 0.404017 0.0332509 -0.201398
+-0.9162 0.344833 0.0463518 -0.198792
+-0.936792 0.284173 0.0592541 -0.195335
+-0.953372 0.222295 0.0719028 -0.191041
+-0.96587 0.159466 0.0842435 -0.185929
+-0.974231 0.0959533 0.0962235 -0.180021
+-0.978421 0.0320303 0.107791 -0.173343
+0.978421 0.0320302 -0.0463517 0.198792
+0.974231 0.0959534 -0.0592541 0.195335
+0.96587 0.159466 -0.0719027 0.191041
+0.953372 0.222295 -0.0842435 0.185929
+0.936792 0.284173 -0.0962235 0.180021
+0.9162 0.344833 -0.107791 0.173343
+0.891686 0.404017 -0.118898 0.165922
+0.863352 0.461472 -0.129495 0.15779
+0.831322 0.516949 -0.139538 0.148983
+0.795732 0.570214 -0.148983 0.139538
+0.756735 0.621036 -0.15779 0.129495
+0.714497 0.669199 -0.165922 0.118898
+0.669199 0.714497 -0.173343 0.107791
+0.621036 0.756735 -0.180021 0.0962235
+0.570214 0.795732 -0.185929 0.0842435
+0.516949 0.831322 -0.191041 0.0719027
+0.461471 0.863352 -0.195335 0.0592541
+0.404017 0.891686 -0.198792 0.0463517
+0.344833 0.9162 -0.201398 0.0332509
+0.284173 0.936792 -0.203141 0.0200077
+0.222295 0.953372 -0.204015 0.00667875
+0.159466 0.96587 -0.204015 -0.00667878
+0.0959533 0.974231 -0.203141 -0.0200077
+0.0320301 0.978421 -0.201398 -0.0332509
+-0.0320303 0.978421 -0.198792 -0.0463518
+-0.0959535 0.974231 -0.195335 -0.0592541
+-0.159466 0.96587 -0.191041 -0.0719028
+-0.222295 0.953372 -0.185929 -0.0842435
+-0.284173 0.936792 -0.180021 -0.0962235
+-0.344834 0.9162 -0.173343 -0.107791
+-0.404018 0.891686 -0.165922 -0.118898
+-0.461471 0.863352 -0.15779 -0.129495
+-0.516949 0.831322 -0.148983 -0.139538
+-0.570214 0.795732 -0.139538 -0.148983
+-0.621036 0.756735 -0.129495 -0.15779
+-0.669199 0.714497 -0.118898 -0.165922
+-0.714497 0.669199 -0.107791 -0.173343
+-0.756735 0.621036 -0.0962234 -0.180021
+-0.795732 0.570214 -0.0842435 -0.185929
+-0.831322 0.516949 -0.0719027 -0.191041
+-0.863352 0.461472 -0.0592541 -0.195335
+-0.891686 0.404017 -0.0463517 -0.198792
+-0.9162 0.344833 -0.0332509 -0.201398
+-0.936792 0.284173 -0.0200077 -0.203141
+-0.953372 0.222295 -0.00667874 -0.204015
+-0.96587 0.159466 0.00667874 -0.204015
+-0.974231 0.0959533 0.0200077 -0.203141
+-0.978421 0.0320303 0.0332509 -0.201398
+0.987683 0.0323334 -0.0444406 0.146501
+0.983453 0.0968617 -0.0539271 0.143281
+0.975013 0.160975 -0.0631826 0.139447
+0.962397 0.224399 -0.0721676 0.135016
+0.94566 0.286863 -0.0808435 0.130007
+0.924873 0.348098 -0.0891733 0.124441
+0.900126 0.407842 -0.0971212 0.118343
+0.871525 0.46584 -0.104653 0.111737
+0.839192 0.521843 -0.111737 0.104653
+0.803265 0.575611 -0.118343 0.0971212
+0.763898 0.626915 -0.124441 0.0891733
+0.72126 0.675534 -0.130007 0.0808435
+0.675534 0.72126 -0.135016 0.0721676
+0.626915 0.763898 -0.139447 0.0631826
+0.575611 0.803265 -0.143281 0.053927
+0.521843 0.839192 -0.146501 0.0444406
+0.46584 0.871525 -0.149094 0.0347638
+0.407842 0.900126 -0.151048 0.0249382
+0.348098 0.924873 -0.152356 0.0150057
+0.286863 0.94566 -0.153011 0.00500906
+0.224399 0.962397 -0.153011 -0.00500907
+0.160975 0.975013 -0.152356 -0.0150058
+0.0968616 0.983453 -0.151048 -0.0249382
+0.0323333 0.987683 -0.149094 -0.0347638
+-0.0323335 0.987683 -0.146501 -0.0444406
+-0.0968618 0.983453 -0.143281 -0.0539271
+-0.160975 0.975013 -0.139447 -0.0631826
+-0.224399 0.962397 -0.135016 -0.0721676
+-0.286863 0.94566 -0.130007 -0.0808436
+-0.348098 0.924873 -0.124441 -0.0891733
+-0.407842 0.900126 -0.118343 -0.0971213
+-0.46584 0.871525 -0.111737 -0.104653
+-0.521843 0.839192 -0.104653 -0.111737
+-0.575611 0.803265 -0.0971212 -0.118343
+-0.626915 0.763898 -0.0891733 -0.124441
+-0.675534 0.72126 -0.0808435 -0.130007
+-0.72126 0.675534 -0.0721676 -0.135016
+-0.763898 0.626915 -0.0631826 -0.139447
+-0.803265 0.575611 -0.0539271 -0.143281
+-0.839192 0.521843 -0.0444406 -0.146501
+-0.871525 0.46584 -0.0347638 -0.149094
+-0.900126 0.407842 -0.0249382 -0.151048
+-0.924873 0.348098 -0.0150057 -0.152356
+-0.94566 0.286863 -0.00500907 -0.153011
+-0.962397 0.224399 0.00500908 -0.153011
+-0.975013 0.160975 0.0150057 -0.152356
+-0.983453 0.0968616 0.0249382 -0.151048
+-0.987683 0.0323335 0.0347638 -0.149094
+0.987683 0.0323334 -0.111737 0.104653
+0.983453 0.0968617 -0.118343 0.0971212
+0.975013 0.160975 -0.124441 0.0891733
+0.962397 0.224399 -0.130007 0.0808435
+0.94566 0.286863 -0.135016 0.0721676
+0.924873 0.348098 -0.139447 0.0631826
+0.900126 0.407842 -0.143281 0.0539271
+0.871525 0.46584 -0.146501 0.0444406
+0.839192 0.521843 -0.149094 0.0347638
+0.803265 0.575611 -0.151048 0.0249382
+0.763898 0.626915 -0.152356 0.0150058
+0.72126 0.675534 -0.153011 0.00500906
+0.675534 0.72126 -0.153011 -0.00500907
+0.626915 0.763898 -0.152356 -0.0150057
+0.575611 0.803265 -0.151048 -0.0249382
+0.521843 0.839192 -0.149094 -0.0347638
+0.46584 0.871525 -0.146501 -0.0444406
+0.407842 0.900126 -0.143281 -0.0539271
+0.348098 0.924873 -0.139447 -0.0631826
+0.286863 0.94566 -0.135016 -0.0721676
+0.224399 0.962397 -0.130007 -0.0808436
+0.160975 0.975013 -0.124441 -0.0891733
+0.0968616 0.983453 -0.118343 -0.0971213
+0.0323333 0.987683 -0.111737 -0.104653
+-0.0323335 0.987683 -0.104653 -0.111737
+-0.0968618 0.983453 -0.0971212 -0.118343
+-0.160975 0.975013 -0.0891733 -0.124441
+-0.224399 0.962397 -0.0808435 -0.130007
+-0.286863 0.94566 -0.0721676 -0.135016
+-0.348098 0.924873 -0.0631826 -0.139447
+-0.407842 0.900126 -0.053927 -0.143281
+-0.46584 0.871525 -0.0444406 -0.146501
+-0.521843 0.839192 -0.0347638 -0.149094
+-0.575611 0.803265 -0.0249382 -0.151048
+-0.626915 0.763898 -0.0150058 -0.152356
+-0.675534 0.72126 -0.00500906 -0.153011
+-0.72126 0.675534 0.00500906 -0.153011
+-0.763898 0.626915 0.0150058 -0.152356
+-0.803265 0.575611 0.0249382 -0.151048
+-0.839192 0.521843 0.0347638 -0.149094
+-0.871525 0.46584 0.0444406 -0.146501
+-0.900126 0.407842 0.0539271 -0.143281
+-0.924873 0.348098 0.0631826 -0.139447
+-0.94566 0.286863 0.0721676 -0.135016
+-0.962397 0.224399 0.0808436 -0.130007
+-0.975013 0.160975 0.0891733 -0.124441
+-0.983453 0.0968616 0.0971212 -0.118343
+-0.987683 0.0323335 0.104653 -0.111737
+0.994245 0.0325482 -0.0955205 0.0359514
+0.989988 0.0975053 -0.0976673 0.0296271
+0.981491 0.162045 -0.0993959 0.0231759
+0.968791 0.22589 -0.100699 0.0166254
+0.951943 0.288769 -0.101571 0.0100038
+0.931019 0.350411 -0.102007 0.00333938
+0.906107 0.410552 -0.102007 -0.00333938
+0.877316 0.468935 -0.101571 -0.0100038
+0.844768 0.52531 -0.100699 -0.0166255
+0.808602 0.579436 -0.0993959 -0.0231759
+0.768974 0.63108 -0.0976673 -0.0296271
+0.726053 0.680023 -0.0955205 -0.0359514
+0.680023 0.726053 -0.0929646 -0.0421217
+0.631081 0.768974 -0.0900107 -0.0481117
+0.579436 0.808602 -0.0866713 -0.0538957
+0.52531 0.844768 -0.0829608 -0.0594489
+0.468935 0.877316 -0.078895 -0.0647475
+0.410552 0.906107 -0.0744914 -0.0697689
+0.350411 0.931019 -0.0697688 -0.0744914
+0.288769 0.951943 -0.0647475 -0.0788951
+0.22589 0.968791 -0.0594489 -0.0829608
+0.162045 0.981491 -0.0538957 -0.0866713
+0.0975052 0.989988 -0.0481117 -0.0900107
+0.0325481 0.994245 -0.0421217 -0.0929647
+-0.0325483 0.994245 -0.0359514 -0.0955205
+-0.0975054 0.989988 -0.029627 -0.0976673
+-0.162045 0.981491 -0.0231759 -0.0993959
+-0.225891 0.968791 -0.0166254 -0.100699
+-0.288769 0.951943 -0.0100038 -0.101571
+-0.350411 0.931019 -0.00333936 -0.102007
+-0.410552 0.906107 0.00333939 -0.102007
+-0.468935 0.877316 0.0100038 -0.101571
+-0.52531 0.844768 0.0166254 -0.100699
+-0.579436 0.808602 0.0231759 -0.0993959
+-0.63108 0.768974 0.0296271 -0.0976673
+-0.680023 0.726053 0.0359514 -0.0955205
+-0.726053 0.680023 0.0421217 -0.0929647
+-0.768974 0.63108 0.0481117 -0.0900107
+-0.808602 0.579436 0.0538957 -0.0866713
+-0.844768 0.52531 0.0594489 -0.0829608
+-0.877316 0.468935 0.0647475 -0.0788951
+-0.906107 0.410552 0.0697688 -0.0744914
+-0.931019 0.350411 0.0744914 -0.0697688
+-0.951943 0.288769 0.078895 -0.0647475
+-0.968791 0.22589 0.0829608 -0.0594489
+-0.981491 0.162045 0.0866713 -0.0538957
+-0.989988 0.0975053 0.0900107 -0.0481117
+-0.994245 0.0325483 0.0929646 -0.0421217
+0.994245 0.0325482 -0.0421217 0.0929646
+0.989988 0.0975053 -0.0481117 0.0900107
+0.981491 0.162045 -0.0538957 0.0866713
+0.968791 0.22589 -0.0594489 0.0829608
+0.951943 0.288769 -0.0647475 0.078895
+0.931019 0.350411 -0.0697688 0.0744914
+0.906107 0.410552 -0.0744914 0.0697688
+0.877316 0.468935 -0.0788951 0.0647475
+0.844768 0.52531 -0.0829608 0.0594489
+0.808602 0.579436 -0.0866713 0.0538957
+0.768974 0.63108 -0.0900107 0.0481117
+0.726053 0.680023 -0.0929647 0.0421217
+0.680023 0.726053 -0.0955205 0.0359514
+0.631081 0.768974 -0.0976673 0.0296271
+0.579436 0.808602 -0.0993959 0.0231759
+0.52531 0.844768 -0.100699 0.0166254
+0.468935 0.877316 -0.101571 0.0100038
+0.410552 0.906107 -0.102007 0.00333937
+0.350411 0.931019 -0.102007 -0.00333938
+0.288769 0.951943 -0.101571 -0.0100038
+0.22589 0.968791 -0.100699 -0.0166255
+0.162045 0.981491 -0.0993959 -0.0231759
+0.0975052 0.989988 -0.0976673 -0.0296271
+0.0325481 0.994245 -0.0955205 -0.0359514
+-0.0325483 0.994245 -0.0929646 -0.0421217
+-0.0975054 0.989988 -0.0900107 -0.0481117
+-0.162045 0.981491 -0.0866713 -0.0538957
+-0.225891 0.968791 -0.0829608 -0.0594489
+-0.288769 0.951943 -0.078895 -0.0647475
+-0.350411 0.931019 -0.0744914 -0.0697689
+-0.410552 0.906107 -0.0697688 -0.0744914
+-0.468935 0.877316 -0.0647475 -0.078895
+-0.52531 0.844768 -0.0594489 -0.0829608
+-0.579436 0.808602 -0.0538957 -0.0866713
+-0.63108 0.768974 -0.0481117 -0.0900107
+-0.680023 0.726053 -0.0421217 -0.0929647
+-0.726053 0.680023 -0.0359514 -0.0955205
+-0.768974 0.63108 -0.0296271 -0.0976673
+-0.808602 0.579436 -0.0231759 -0.0993959
+-0.844768 0.52531 -0.0166254 -0.100699
+-0.877316 0.468935 -0.0100038 -0.101571
+-0.906107 0.410552 -0.00333938 -0.102007
+-0.931019 0.350411 0.00333939 -0.102007
+-0.951943 0.288769 0.0100038 -0.101571
+-0.968791 0.22589 0.0166255 -0.100699
+-0.981491 0.162045 0.0231759 -0.0993959
+-0.989988 0.0975053 0.0296271 -0.0976673
+-0.994245 0.0325483 0.0359514 -0.0955205
+0.998162 0.0326765 -0.0372457 0.0348844
+0.993888 0.0978894 -0.0394475 0.0323737
+0.985358 0.162683 -0.0414804 0.0297244
+0.972608 0.22678 -0.0433357 0.0269478
+0.955694 0.289906 -0.0450054 0.0240559
+0.934687 0.351791 -0.0464823 0.0210609
+0.909677 0.412169 -0.0477602 0.0179757
+0.880772 0.470783 -0.0488337 0.0148135
+0.848096 0.52738 -0.049698 0.0115879
+0.811788 0.581719 -0.0503494 0.00831273
+0.772003 0.633567 -0.0507853 0.00500192
+0.728913 0.682702 -0.0510037 0.00166969
+0.682702 0.728913 -0.0510037 -0.00166969
+0.633567 0.772003 -0.0507853 -0.00500192
+0.581719 0.811788 -0.0503494 -0.00831273
+0.52738 0.848096 -0.049698 -0.0115879
+0.470782 0.880772 -0.0488337 -0.0148135
+0.412169 0.909677 -0.0477602 -0.0179757
+0.351791 0.934687 -0.0464823 -0.0210609
+0.289906 0.955694 -0.0450054 -0.0240559
+0.22678 0.972608 -0.0433357 -0.0269479
+0.162683 0.985358 -0.0414804 -0.0297244
+0.0978893 0.993888 -0.0394475 -0.0323738
+0.0326763 0.998162 -0.0372457 -0.0348844
+-0.0326765 0.998162 -0.0348844 -0.0372457
+-0.0978895 0.993888 -0.0323737 -0.0394475
+-0.162683 0.985358 -0.0297244 -0.0414804
+-0.22678 0.972608 -0.0269478 -0.0433357
+-0.289907 0.955693 -0.0240559 -0.0450054
+-0.351791 0.934686 -0.0210609 -0.0464823
+-0.412169 0.909677 -0.0179757 -0.0477603
+-0.470782 0.880772 -0.0148135 -0.0488337
+-0.52738 0.848096 -0.0115879 -0.049698
+-0.581719 0.811788 -0.00831272 -0.0503494
+-0.633567 0.772003 -0.00500192 -0.0507853
+-0.682702 0.728913 -0.00166969 -0.0510037
+-0.728913 0.682702 0.00166969 -0.0510037
+-0.772003 0.633567 0.00500192 -0.0507853
+-0.811788 0.581719 0.00831272 -0.0503494
+-0.848096 0.52738 0.0115879 -0.049698
+-0.880772 0.470783 0.0148135 -0.0488337
+-0.909677 0.412169 0.0179757 -0.0477602
+-0.934687 0.351791 0.0210609 -0.0464823
+-0.955693 0.289906 0.0240559 -0.0450054
+-0.972608 0.22678 0.0269479 -0.0433357
+-0.985358 0.162683 0.0297244 -0.0414804
+-0.993888 0.0978894 0.0323737 -0.0394475
+-0.998162 0.0326765 0.0348844 -0.0372457
+0.735586 0.0240806 -0.462794 -0.49412
+0.732436 0.0721387 -0.429486 -0.523331
+0.72615 0.119888 -0.394339 -0.5503
+0.716754 0.167124 -0.357504 -0.574913
+0.704289 0.213644 -0.319137 -0.597064
+0.688808 0.259249 -0.279404 -0.616658
+0.670378 0.303744 -0.238474 -0.633611
+0.649076 0.346939 -0.196524 -0.647852
+0.624996 0.388647 -0.153731 -0.659318
+0.598239 0.428692 -0.110281 -0.667961
+0.56892 0.466901 -0.0663579 -0.673743
+0.537165 0.50311 -0.0221509 -0.676641
+0.50311 0.537165 0.0221509 -0.676641
+0.466901 0.56892 0.0663579 -0.673743
+0.428692 0.598239 0.110281 -0.667961
+0.388647 0.624996 0.153731 -0.659318
+0.346939 0.649077 0.196524 -0.647852
+0.303744 0.670378 0.238474 -0.633611
+0.259249 0.688808 0.279404 -0.616658
+0.213644 0.704289 0.319137 -0.597064
+0.167124 0.716754 0.357504 -0.574913
+0.119888 0.72615 0.394339 -0.5503
+0.0721386 0.732436 0.429486 -0.52333
+0.0240805 0.735586 0.462794 -0.49412
+-0.0240807 0.735586 0.49412 -0.462794
+-0.0721387 0.732436 0.523331 -0.429486
+-0.119888 0.72615 0.5503 -0.394339
+-0.167124 0.716754 0.574913 -0.357504
+-0.213644 0.704289 0.597064 -0.319137
+-0.259249 0.688808 0.616658 -0.279404
+-0.303744 0.670378 0.633611 -0.238474
+-0.346939 0.649077 0.647852 -0.196524
+-0.388647 0.624996 0.659318 -0.153731
+-0.428692 0.598239 0.667961 -0.110281
+-0.466901 0.56892 0.673743 -0.0663579
+-0.50311 0.537165 0.676641 -0.0221509
+-0.537165 0.50311 0.676641 0.0221509
+-0.56892 0.466901 0.673743 0.0663579
+-0.598239 0.428692 0.667961 0.110281
+-0.624996 0.388647 0.659318 0.153731
+-0.649076 0.346939 0.647852 0.196524
+-0.670378 0.303744 0.633611 0.238474
+-0.688808 0.259249 0.616658 0.279404
+-0.704289 0.213644 0.597064 0.319137
+-0.716754 0.167124 0.574913 0.357504
+-0.72615 0.119888 0.5503 0.394339
+-0.732436 0.0721386 0.523331 0.429486
+-0.735586 0.0240807 0.49412 0.462794
+0.763354 0.0249896 -0.392954 -0.512107
+0.760085 0.0748618 -0.358619 -0.536711
+0.753561 0.124413 -0.322749 -0.559017
+0.743811 0.173432 -0.285496 -0.578929
+0.730875 0.221709 -0.247021 -0.596362
+0.71481 0.269035 -0.207488 -0.611241
+0.695684 0.31521 -0.167067 -0.623502
+0.673578 0.360035 -0.12593 -0.633094
+0.648589 0.403318 -0.0842543 -0.639975
+0.620822 0.444875 -0.0422175 -0.644115
+0.590396 0.484526 -1.17023e-08 -0.645497
+0.557443 0.522102 0.0422176 -0.644115
+0.522102 0.557443 0.0842543 -0.639975
+0.484526 0.590396 0.12593 -0.633094
+0.444875 0.620822 0.167067 -0.623502
+0.403318 0.648589 0.207488 -0.611241
+0.360035 0.673579 0.247021 -0.596362
+0.31521 0.695684 0.285496 -0.578929
+0.269035 0.71481 0.322749 -0.559017
+0.221709 0.730875 0.358619 -0.536711
+0.173432 0.743811 0.392954 -0.512107
+0.124413 0.753561 0.425606 -0.48531
+0.0748617 0.760085 0.456436 -0.456435
+0.0249895 0.763354 0.485311 -0.425606
+-0.0249897 0.763354 0.512107 -0.392954
+-0.0748619 0.760085 0.536711 -0.358619
+-0.124414 0.753561 0.559017 -0.322749
+-0.173432 0.743811 0.578929 -0.285496
+-0.221709 0.730875 0.596362 -0.247021
+-0.269036 0.71481 0.611241 -0.207488
+-0.31521 0.695684 0.623502 -0.167067
+-0.360035 0.673579 0.633094 -0.12593
+-0.403318 0.648589 0.639975 -0.0842543
+-0.444875 0.620822 0.644115 -0.0422175
+-0.484526 0.590397 0.645497 -2.19614e-08
+-0.522102 0.557443 0.644115 0.0422176
+-0.557443 0.522102 0.639975 0.0842543
+-0.590397 0.484526 0.633094 0.12593
+-0.620822 0.444875 0.623502 0.167067
+-0.648589 0.403318 0.611241 0.207488
+-0.673578 0.360035 0.596362 0.247021
+-0.695684 0.31521 0.578929 0.285496
+-0.71481 0.269035 0.559017 0.322749
+-0.730875 0.221709 0.536711 0.358619
+-0.743811 0.173432 0.512107 0.392954
+-0.753561 0.124414 0.485311 0.425606
+-0.760085 0.0748618 0.456435 0.456435
+-0.763354 0.0249897 0.425606 0.48531
+0.763354 0.0249896 -0.485311 -0.425606
+0.760085 0.0748618 -0.456435 -0.456435
+0.753561 0.124413 -0.425606 -0.485311
+0.743811 0.173432 -0.392954 -0.512107
+0.730875 0.221709 -0.358619 -0.536711
+0.71481 0.269035 -0.322749 -0.559017
+0.695684 0.31521 -0.285496 -0.578929
+0.673578 0.360035 -0.247021 -0.596362
+0.648589 0.403318 -0.207488 -0.611241
+0.620822 0.444875 -0.167067 -0.623502
+0.590396 0.484526 -0.12593 -0.633094
+0.557443 0.522102 -0.0842543 -0.639975
+0.522102 0.557443 -0.0422175 -0.644115
+0.484526 0.590396 1.44328e-09 -0.645497
+0.444875 0.620822 0.0422176 -0.644115
+0.403318 0.648589 0.0842544 -0.639975
+0.360035 0.673579 0.12593 -0.633094
+0.31521 0.695684 0.167067 -0.623502
+0.269035 0.71481 0.207488 -0.611241
+0.221709 0.730875 0.247021 -0.596362
+0.173432 0.743811 0.285496 -0.578929
+0.124413 0.753561 0.322749 -0.559017
+0.0748617 0.760085 0.358619 -0.536711
+0.0249895 0.763354 0.392954 -0.512107
+-0.0249897 0.763354 0.425606 -0.48531
+-0.0748619 0.760085 0.456436 -0.456435
+-0.124414 0.753561 0.485311 -0.425606
+-0.173432 0.743811 0.512107 -0.392954
+-0.221709 0.730875 0.536711 -0.358619
+-0.269036 0.71481 0.559017 -0.322749
+-0.31521 0.695684 0.578929 -0.285496
+-0.360035 0.673579 0.596362 -0.247021
+-0.403318 0.648589 0.611241 -0.207488
+-0.444875 0.620822 0.623502 -0.167067
+-0.484526 0.590397 0.633094 -0.12593
+-0.522102 0.557443 0.639975 -0.0842542
+-0.557443 0.522102 0.644115 -0.0422176
+-0.590397 0.484526 0.645497 2.96589e-08
+-0.620822 0.444875 0.644115 0.0422175
+-0.648589 0.403318 0.639975 0.0842543
+-0.673578 0.360035 0.633094 0.12593
+-0.695684 0.31521 0.623502 0.167067
+-0.71481 0.269035 0.611241 0.207488
+-0.730875 0.221709 0.596362 0.247021
+-0.743811 0.173432 0.578929 0.285496
+-0.753561 0.124414 0.559017 0.322749
+-0.760085 0.0748618 0.536711 0.358619
+-0.763354 0.0249897 0.512107 0.392954
+0.790146 0.0258667 -0.418613 -0.446949
+0.786763 0.0774894 -0.388485 -0.47337
+0.78001 0.12878 -0.356693 -0.497765
+0.769917 0.17952 -0.323374 -0.520028
+0.756528 0.22949 -0.28867 -0.540064
+0.739899 0.278478 -0.25273 -0.557788
+0.720101 0.326274 -0.215708 -0.573123
+0.69722 0.372672 -0.177762 -0.586004
+0.671353 0.417474 -0.139055 -0.596375
+0.642612 0.460489 -0.0997527 -0.604193
+0.611118 0.501532 -0.060023 -0.609424
+0.577008 0.540427 -0.0200362 -0.612045
+0.540427 0.577008 0.0200363 -0.612045
+0.501532 0.611118 0.060023 -0.609424
+0.460489 0.642612 0.0997527 -0.604193
+0.417474 0.671353 0.139055 -0.596375
+0.372672 0.69722 0.177762 -0.586004
+0.326274 0.720101 0.215708 -0.573123
+0.278478 0.739899 0.25273 -0.557788
+0.22949 0.756528 0.28867 -0.540064
+0.17952 0.769917 0.323374 -0.520028
+0.12878 0.78001 0.356693 -0.497765
+0.0774893 0.786763 0.388485 -0.47337
+0.0258666 0.790146 0.418613 -0.446949
+-0.0258668 0.790146 0.446949 -0.418613
+-0.0774894 0.786763 0.47337 -0.388485
+-0.12878 0.78001 0.497765 -0.356693
+-0.17952 0.769917 0.520028 -0.323374
+-0.22949 0.756528 0.540064 -0.28867
+-0.278478 0.739899 0.557788 -0.25273
+-0.326274 0.720101 0.573123 -0.215708
+-0.372672 0.69722 0.586004 -0.177762
+-0.417474 0.671353 0.596375 -0.139055
+-0.460489 0.642612 0.604193 -0.0997526
+-0.501532 0.611118 0.609424 -0.060023
+-0.540427 0.577008 0.612045 -0.0200362
+-0.577008 0.540427 0.612045 0.0200362
+-0.611118 0.501532 0.609424 0.060023
+-0.642612 0.460489 0.604193 0.0997526
+-0.671353 0.417474 0.596375 0.139055
+-0.69722 0.372672 0.586004 0.177762
+-0.720101 0.326274 0.573123 0.215708
+-0.739899 0.278478 0.557788 0.25273
+-0.756528 0.22949 0.540064 0.28867
+-0.769917 0.179519 0.520028 0.323374
+-0.78001 0.12878 0.497765 0.356693
+-0.786763 0.0774893 0.47337 0.388485
+-0.790146 0.0258668 0.446949 0.418613
+0.790146 0.0258667 -0.323374 -0.520028
+0.786763 0.0774894 -0.28867 -0.540064
+0.78001 0.12878 -0.25273 -0.557788
+0.769917 0.17952 -0.215708 -0.573123
+0.756528 0.22949 -0.177762 -0.586004
+0.739899 0.278478 -0.139055 -0.596375
+0.720101 0.326274 -0.0997527 -0.604193
+0.69722 0.372672 -0.060023 -0.609424
+0.671353 0.417474 -0.0200363 -0.612045
+0.642612 0.460489 0.0200363 -0.612045
+0.611118 0.501532 0.060023 -0.609424
+0.577008 0.540427 0.0997527 -0.604193
+0.540427 0.577008 0.139055 -0.596375
+0.501532 0.611118 0.177762 -0.586004
+0.460489 0.642612 0.215708 -0.573123
+0.417474 0.671353 0.25273 -0.557788
+0.372672 0.69722 0.28867 -0.540064
+0.326274 0.720101 0.323374 -0.520028
+0.278478 0.739899 0.356693 -0.497765
+0.22949 0.756528 0.388485 -0.47337
+0.17952 0.769917 0.418613 -0.446949
+0.12878 0.78001 0.446949 -0.418613
+0.0774893 0.786763 0.47337 -0.388485
+0.0258666 0.790146 0.497765 -0.356693
+-0.0258668 0.790146 0.520028 -0.323374
+-0.0774894 0.786763 0.540064 -0.28867
+-0.12878 0.78001 0.557788 -0.25273
+-0.17952 0.769917 0.573123 -0.215708
+-0.22949 0.756528 0.586004 -0.177762
+-0.278478 0.739899 0.596375 -0.139055
+-0.326274 0.720101 0.604193 -0.0997526
+-0.372672 0.69722 0.609424 -0.0600231
+-0.417474 0.671353 0.612045 -0.0200363
+-0.460489 0.642612 0.612045 0.0200363
+-0.501532 0.611118 0.609424 0.060023
+-0.540427 0.577008 0.604193 0.0997527
+-0.577008 0.540427 0.596375 0.139055
+-0.611118 0.501532 0.586004 0.177762
+-0.642612 0.460489 0.573123 0.215708
+-0.671353 0.417474 0.557788 0.25273
+-0.69722 0.372672 0.540064 0.28867
+-0.720101 0.326274 0.520028 0.323374
+-0.739899 0.278478 0.497765 0.356693
+-0.756528 0.22949 0.47337 0.388485
+-0.769917 0.179519 0.446949 0.418613
+-0.78001 0.12878 0.418613 0.446949
+-0.786763 0.0774893 0.388485 0.47337
+-0.790146 0.0258668 0.356693 0.497765
+0.816059 0.026715 -0.255355 -0.51781
+0.812565 0.0800307 -0.220942 -0.533402
+0.805591 0.133004 -0.185583 -0.54671
+0.795167 0.185407 -0.149429 -0.557678
+0.781339 0.237016 -0.112635 -0.566257
+0.764164 0.287611 -0.0753593 -0.572411
+0.743717 0.336974 -0.0377605 -0.576114
+0.720086 0.384894 1.21881e-08 -0.57735
+0.693371 0.431166 0.0377605 -0.576114
+0.663687 0.475591 0.0753593 -0.572411
+0.63116 0.51798 0.112635 -0.566257
+0.595932 0.558151 0.149429 -0.557678
+0.558151 0.595932 0.185583 -0.54671
+0.51798 0.63116 0.220942 -0.533402
+0.475591 0.663687 0.255356 -0.51781
+0.431166 0.693371 0.288675 -0.5
+0.384894 0.720086 0.320759 -0.480049
+0.336974 0.743717 0.351469 -0.458043
+0.287611 0.764164 0.380674 -0.434075
+0.237016 0.781339 0.408248 -0.408248
+0.185407 0.795167 0.434075 -0.380673
+0.133003 0.805591 0.458043 -0.351469
+0.0800306 0.812565 0.480049 -0.320759
+0.0267149 0.816059 0.5 -0.288675
+-0.0267151 0.816059 0.51781 -0.255355
+-0.0800307 0.812565 0.533402 -0.220942
+-0.133004 0.805591 0.54671 -0.185583
+-0.185407 0.795167 0.557678 -0.149429
+-0.237017 0.781338 0.566257 -0.112635
+-0.287611 0.764164 0.572411 -0.0753592
+-0.336974 0.743717 0.576114 -0.0377604
+-0.384894 0.720086 0.57735 -6.58134e-08
+-0.431166 0.693371 0.576114 0.0377605
+-0.475591 0.663686 0.572411 0.0753594
+-0.51798 0.63116 0.566257 0.112635
+-0.558151 0.595931 0.557678 0.149429
+-0.595931 0.558151 0.54671 0.185583
+-0.63116 0.51798 0.533402 0.220942
+-0.663686 0.475591 0.51781 0.255355
+-0.693371 0.431166 0.5 0.288675
+-0.720086 0.384894 0.480049 0.320759
+-0.743717 0.336974 0.458043 0.351469
+-0.764164 0.287611 0.434075 0.380674
+-0.781339 0.237016 0.408248 0.408248
+-0.795167 0.185407 0.380673 0.434075
+-0.805591 0.133004 0.351469 0.458043
+-0.812565 0.0800306 0.320759 0.480049
+-0.816059 0.0267151 0.288675 0.5
+0.816059 0.026715 -0.351469 -0.458043
+0.812565 0.0800307 -0.320759 -0.480049
+0.805591 0.133004 -0.288675 -0.5
+0.795167 0.185407 -0.255355 -0.51781
+0.781339 0.237016 -0.220942 -0.533402
+0.764164 0.287611 -0.185583 -0.54671
+0.743717 0.336974 -0.149429 -0.557678
+0.720086 0.384894 -0.112635 -0.566257
+0.693371 0.431166 -0.0753593 -0.572411
+0.663687 0.475591 -0.0377605 -0.576114
+0.63116 0.51798 -1.04669e-08 -0.57735
+0.595932 0.558151 0.0377605 -0.576114
+0.558151 0.595932 0.0753593 -0.572411
+0.51798 0.63116 0.112635 -0.566257
+0.475591 0.663687 0.149429 -0.557678
+0.431166 0.693371 0.185583 -0.54671
+0.384894 0.720086 0.220942 -0.533402
+0.336974 0.743717 0.255356 -0.51781
+0.287611 0.764164 0.288675 -0.5
+0.237016 0.781339 0.320759 -0.480049
+0.185407 0.795167 0.351469 -0.458043
+0.133003 0.805591 0.380674 -0.434075
+0.0800306 0.812565 0.408248 -0.408248
+0.0267149 0.816059 0.434075 -0.380673
+-0.0267151 0.816059 0.458043 -0.351469
+-0.0800307 0.812565 0.480049 -0.320759
+-0.133004 0.805591 0.5 -0.288675
+-0.185407 0.795167 0.51781 -0.255355
+-0.237017 0.781338 0.533402 -0.220942
+-0.287611 0.764164 0.54671 -0.185583
+-0.336974 0.743717 0.557678 -0.149429
+-0.384894 0.720086 0.566257 -0.112636
+-0.431166 0.693371 0.572411 -0.0753593
+-0.475591 0.663686 0.576114 -0.0377605
+-0.51798 0.63116 0.57735 -1.96429e-08
+-0.558151 0.595931 0.576114 0.0377606
+-0.595931 0.558151 0.572411 0.0753593
+-0.63116 0.51798 0.566257 0.112635
+-0.663686 0.475591 0.557678 0.149429
+-0.693371 0.431166 0.54671 0.185583
+-0.720086 0.384894 0.533402 0.220942
+-0.743717 0.336974 0.51781 0.255355
+-0.764164 0.287611 0.5 0.288675
+-0.781339 0.237016 0.480049 0.320759
+-0.795167 0.185407 0.458043 0.351469
+-0.805591 0.133004 0.434075 0.380673
+-0.812565 0.0800306 0.408248 0.408248
+-0.816059 0.0267151 0.380674 0.434075
+0.841175 0.0275372 -0.285189 -0.458622
+0.837573 0.0824937 -0.254583 -0.476292
+0.830384 0.137097 -0.222887 -0.491923
+0.81964 0.191113 -0.190237 -0.505447
+0.805385 0.244311 -0.156772 -0.516807
+0.787682 0.296463 -0.122635 -0.525954
+0.766606 0.347345 -0.0879736 -0.532848
+0.742247 0.396739 -0.0529353 -0.537461
+0.71471 0.444435 -0.0176703 -0.539773
+0.684112 0.490228 0.0176703 -0.539773
+0.650585 0.533921 0.0529353 -0.537461
+0.614272 0.575329 0.0879736 -0.532848
+0.575329 0.614272 0.122635 -0.525954
+0.533922 0.650585 0.156772 -0.516807
+0.490228 0.684112 0.190237 -0.505447
+0.444435 0.71471 0.222887 -0.491923
+0.396739 0.742247 0.254583 -0.476292
+0.347345 0.766606 0.285189 -0.458622
+0.296463 0.787682 0.314574 -0.438987
+0.244311 0.805385 0.342612 -0.417473
+0.191113 0.81964 0.369182 -0.394172
+0.137097 0.830384 0.394172 -0.369182
+0.0824936 0.837573 0.417473 -0.342611
+0.0275371 0.841175 0.438987 -0.314574
+-0.0275373 0.841175 0.458622 -0.285189
+-0.0824938 0.837573 0.476292 -0.254583
+-0.137097 0.830384 0.491923 -0.222887
+-0.191113 0.81964 0.505447 -0.190237
+-0.244311 0.805385 0.516807 -0.156772
+-0.296463 0.787682 0.525954 -0.122635
+-0.347345 0.766606 0.532848 -0.0879735
+-0.396739 0.742247 0.537461 -0.0529354
+-0.444435 0.71471 0.539773 -0.0176703
+-0.490228 0.684112 0.539773 0.0176704
+-0.533921 0.650585 0.537461 0.0529353
+-0.575329 0.614272 0.532848 0.0879736
+-0.614272 0.575329 0.525954 0.122635
+-0.650585 0.533921 0.516807 0.156772
+-0.684112 0.490228 0.505447 0.190237
+-0.71471 0.444435 0.491923 0.222887
+-0.742247 0.39674 0.476292 0.254583
+-0.766606 0.347345 0.458622 0.285189
+-0.787682 0.296463 0.438987 0.314574
+-0.805385 0.244311 0.417473 0.342612
+-0.81964 0.191113 0.394172 0.369182
+-0.830384 0.137097 0.369182 0.394172
+-0.837573 0.0824937 0.342612 0.417473
+-0.841175 0.0275373 0.314574 0.438987
+0.790146 0.0258667 -0.497765 -0.356693
+0.786763 0.0774894 -0.47337 -0.388485
+0.78001 0.12878 -0.446949 -0.418613
+0.769917 0.17952 -0.418613 -0.446949
+0.756528 0.22949 -0.388485 -0.47337
+0.739899 0.278478 -0.356693 -0.497765
+0.720101 0.326274 -0.323374 -0.520028
+0.69722 0.372672 -0.28867 -0.540064
+0.671353 0.417474 -0.25273 -0.557788
+0.642612 0.460489 -0.215708 -0.573123
+0.611118 0.501532 -0.177762 -0.586004
+0.577008 0.540427 -0.139055 -0.596375
+0.540427 0.577008 -0.0997527 -0.604193
+0.501532 0.611118 -0.060023 -0.609424
+0.460489 0.642612 -0.0200362 -0.612045
+0.417474 0.671353 0.0200363 -0.612045
+0.372672 0.69722 0.060023 -0.609424
+0.326274 0.720101 0.0997527 -0.604193
+0.278478 0.739899 0.139055 -0.596375
+0.22949 0.756528 0.177762 -0.586004
+0.17952 0.769917 0.215708 -0.573123
+0.12878 0.78001 0.25273 -0.557788
+0.0774893 0.786763 0.28867 -0.540064
+0.0258666 0.790146 0.323374 -0.520028
+-0.0258668 0.790146 0.356693 -0.497765
+-0.0774894 0.786763 0.388485 -0.47337
+-0.12878 0.78001 0.418613 -0.446949
+-0.17952 0.769917 0.446949 -0.418613
+-0.22949 0.756528 0.47337 -0.388485
+-0.278478 0.739899 0.497765 -0.356693
+-0.326274 0.720101 0.520028 -0.323374
+-0.372672 0.69722 0.540064 -0.28867
+-0.417474 0.671353 0.557788 -0.25273
+-0.460489 0.642612 0.573123 -0.215708
+-0.501532 0.611118 0.586004 -0.177762
+-0.540427 0.577008 0.596375 -0.139055
+-0.577008 0.540427 0.604193 -0.0997527
+-0.611118 0.501532 0.609424 -0.060023
+-0.642612 0.460489 0.612045 -0.0200363
+-0.671353 0.417474 0.612045 0.0200363
+-0.69722 0.372672 0.609424 0.0600229
+-0.720101 0.326274 0.604193 0.0997527
+-0.739899 0.278478 0.596375 0.139055
+-0.756528 0.22949 0.586004 0.177762
+-0.769917 0.179519 0.573123 0.215708
+-0.78001 0.12878 0.557788 0.25273
+-0.786763 0.0774893 0.540064 0.28867
+-0.790146 0.0258668 0.520028 0.323374
+0.816059 0.026715 -0.434075 -0.380673
+0.812565 0.0800307 -0.408248 -0.408248
+0.805591 0.133004 -0.380673 -0.434075
+0.795167 0.185407 -0.351469 -0.458043
+0.781339 0.237016 -0.320759 -0.480049
+0.764164 0.287611 -0.288675 -0.5
+0.743717 0.336974 -0.255355 -0.51781
+0.720086 0.384894 -0.220942 -0.533402
+0.693371 0.431166 -0.185583 -0.54671
+0.663687 0.475591 -0.149429 -0.557678
+0.63116 0.51798 -0.112635 -0.566257
+0.595932 0.558151 -0.0753593 -0.572411
+0.558151 0.595932 -0.0377605 -0.576114
+0.51798 0.63116 1.29091e-09 -0.57735
+0.475591 0.663687 0.0377605 -0.576114
+0.431166 0.693371 0.0753594 -0.572411
+0.384894 0.720086 0.112635 -0.566257
+0.336974 0.743717 0.149429 -0.557678
+0.287611 0.764164 0.185583 -0.54671
+0.237016 0.781339 0.220942 -0.533402
+0.185407 0.795167 0.255356 -0.51781
+0.133003 0.805591 0.288675 -0.5
+0.0800306 0.812565 0.320759 -0.480049
+0.0267149 0.816059 0.351469 -0.458043
+-0.0267151 0.816059 0.380674 -0.434075
+-0.0800307 0.812565 0.408248 -0.408248
+-0.133004 0.805591 0.434075 -0.380673
+-0.185407 0.795167 0.458043 -0.351469
+-0.237017 0.781338 0.480049 -0.320759
+-0.287611 0.764164 0.5 -0.288675
+-0.336974 0.743717 0.51781 -0.255355
+-0.384894 0.720086 0.533402 -0.220942
+-0.431166 0.693371 0.54671 -0.185583
+-0.475591 0.663686 0.557678 -0.149429
+-0.51798 0.63116 0.566257 -0.112635
+-0.558151 0.595931 0.572411 -0.0753593
+-0.595931 0.558151 0.576114 -0.0377605
+-0.63116 0.51798 0.57735 2.65277e-08
+-0.663686 0.475591 0.576114 0.0377605
+-0.693371 0.431166 0.572411 0.0753593
+-0.720086 0.384894 0.566257 0.112635
+-0.743717 0.336974 0.557678 0.149429
+-0.764164 0.287611 0.54671 0.185583
+-0.781339 0.237016 0.533402 0.220942
+-0.795167 0.185407 0.51781 0.255356
+-0.805591 0.133004 0.5 0.288675
+-0.812565 0.0800306 0.480049 0.320759
+-0.816059 0.0267151 0.458043 0.351469
+0.816059 0.026715 -0.5 -0.288675
+0.812565 0.0800307 -0.480049 -0.320759
+0.805591 0.133004 -0.458043 -0.351469
+0.795167 0.185407 -0.434075 -0.380673
+0.781339 0.237016 -0.408248 -0.408248
+0.764164 0.287611 -0.380673 -0.434075
+0.743717 0.336974 -0.351469 -0.458043
+0.720086 0.384894 -0.320759 -0.480049
+0.693371 0.431166 -0.288675 -0.5
+0.663687 0.475591 -0.255355 -0.51781
+0.63116 0.51798 -0.220942 -0.533402
+0.595932 0.558151 -0.185583 -0.54671
+0.558151 0.595932 -0.149429 -0.557678
+0.51798 0.63116 -0.112635 -0.566257
+0.475591 0.663687 -0.0753593 -0.572411
+0.431166 0.693371 -0.0377605 -0.576114
+0.384894 0.720086 4.74615e-08 -0.57735
+0.336974 0.743717 0.0377606 -0.576114
+0.287611 0.764164 0.0753594 -0.572411
+0.237016 0.781339 0.112635 -0.566257
+0.185407 0.795167 0.149429 -0.557678
+0.133003 0.805591 0.185583 -0.54671
+0.0800306 0.812565 0.220942 -0.533402
+0.0267149 0.816059 0.255356 -0.51781
+-0.0267151 0.816059 0.288675 -0.5
+-0.0800307 0.812565 0.320759 -0.480049
+-0.133004 0.805591 0.351469 -0.458043
+-0.185407 0.795167 0.380674 -0.434075
+-0.237017 0.781338 0.408248 -0.408248
+-0.287611 0.764164 0.434075 -0.380673
+-0.336974 0.743717 0.458043 -0.351469
+-0.384894 0.720086 0.480049 -0.320759
+-0.431166 0.693371 0.5 -0.288675
+-0.475591 0.663686 0.51781 -0.255355
+-0.51798 0.63116 0.533402 -0.220942
+-0.558151 0.595931 0.54671 -0.185583
+-0.595931 0.558151 0.557678 -0.149429
+-0.63116 0.51798 0.566257 -0.112635
+-0.663686 0.475591 0.572411 -0.0753594
+-0.693371 0.431166 0.576114 -0.0377605
+-0.720086 0.384894 0.57735 -6.49528e-08
+-0.743717 0.336974 0.576114 0.0377605
+-0.764164 0.287611 0.572411 0.0753594
+-0.781339 0.237016 0.566257 0.112635
+-0.795167 0.185407 0.557678 0.149429
+-0.805591 0.133004 0.54671 0.185583
+-0.812565 0.0800306 0.533402 0.220942
+-0.816059 0.0267151 0.51781 0.255355
+0.841175 0.0275372 -0.438987 -0.314574
+0.837573 0.0824937 -0.417473 -0.342612
+0.830384 0.137097 -0.394172 -0.369182
+0.81964 0.191113 -0.369182 -0.394172
+0.805385 0.244311 -0.342612 -0.417473
+0.787682 0.296463 -0.314574 -0.438987
+0.766606 0.347345 -0.285189 -0.458622
+0.742247 0.396739 -0.254583 -0.476292
+0.71471 0.444435 -0.222887 -0.491923
+0.684112 0.490228 -0.190237 -0.505447
+0.650585 0.533921 -0.156772 -0.516807
+0.614272 0.575329 -0.122635 -0.525954
+0.575329 0.614272 -0.0879736 -0.532848
+0.533922 0.650585 -0.0529353 -0.537461
+0.490228 0.684112 -0.0176703 -0.539773
+0.444435 0.71471 0.0176704 -0.539773
+0.396739 0.742247 0.0529354 -0.537461
+0.347345 0.766606 0.0879736 -0.532848
+0.296463 0.787682 0.122635 -0.525954
+0.244311 0.805385 0.156772 -0.516807
+0.191113 0.81964 0.190237 -0.505447
+0.137097 0.830384 0.222887 -0.491923
+0.0824936 0.837573 0.254583 -0.476292
+0.0275371 0.841175 0.285189 -0.458622
+-0.0275373 0.841175 0.314574 -0.438987
+-0.0824938 0.837573 0.342612 -0.417473
+-0.137097 0.830384 0.369182 -0.394172
+-0.191113 0.81964 0.394172 -0.369182
+-0.244311 0.805385 0.417473 -0.342611
+-0.296463 0.787682 0.438987 -0.314574
+-0.347345 0.766606 0.458622 -0.285189
+-0.396739 0.742247 0.476292 -0.254583
+-0.444435 0.71471 0.491923 -0.222887
+-0.490228 0.684112 0.505447 -0.190237
+-0.533921 0.650585 0.516807 -0.156772
+-0.575329 0.614272 0.525954 -0.122635
+-0.614272 0.575329 0.532848 -0.0879736
+-0.650585 0.533921 0.537461 -0.0529353
+-0.684112 0.490228 0.539773 -0.0176704
+-0.71471 0.444435 0.539773 0.0176703
+-0.742247 0.39674 0.537461 0.0529352
+-0.766606 0.347345 0.532848 0.0879736
+-0.787682 0.296463 0.525954 0.122635
+-0.805385 0.244311 0.516807 0.156772
+-0.81964 0.191113 0.505447 0.190237
+-0.830384 0.137097 0.491923 0.222887
+-0.837573 0.0824937 0.476292 0.254583
+-0.841175 0.0275373 0.458622 0.285189
+0.841175 0.0275372 -0.369182 -0.394172
+0.837573 0.0824937 -0.342612 -0.417473
+0.830384 0.137097 -0.314574 -0.438987
+0.81964 0.191113 -0.285189 -0.458622
+0.805385 0.244311 -0.254583 -0.476292
+0.787682 0.296463 -0.222887 -0.491923
+0.766606 0.347345 -0.190237 -0.505447
+0.742247 0.396739 -0.156772 -0.516807
+0.71471 0.444435 -0.122635 -0.525954
+0.684112 0.490228 -0.0879736 -0.532848
+0.650585 0.533921 -0.0529353 -0.537461
+0.614272 0.575329 -0.0176703 -0.539773
+0.575329 0.614272 0.0176703 -0.539773
+0.533922 0.650585 0.0529353 -0.537461
+0.490228 0.684112 0.0879736 -0.532848
+0.444435 0.71471 0.122635 -0.525954
+0.396739 0.742247 0.156772 -0.516807
+0.347345 0.766606 0.190237 -0.505447
+0.296463 0.787682 0.222887 -0.491923
+0.244311 0.805385 0.254583 -0.476292
+0.191113 0.81964 0.285189 -0.458622
+0.137097 0.830384 0.314574 -0.438987
+0.0824936 0.837573 0.342612 -0.417473
+0.0275371 0.841175 0.369182 -0.394172
+-0.0275373 0.841175 0.394172 -0.369182
+-0.0824938 0.837573 0.417473 -0.342611
+-0.137097 0.830384 0.438987 -0.314574
+-0.191113 0.81964 0.458622 -0.285189
+-0.244311 0.805385 0.476292 -0.254583
+-0.296463 0.787682 0.491923 -0.222887
+-0.347345 0.766606 0.505447 -0.190237
+-0.396739 0.742247 0.516807 -0.156772
+-0.444435 0.71471 0.525954 -0.122635
+-0.490228 0.684112 0.532848 -0.0879736
+-0.533921 0.650585 0.537461 -0.0529353
+-0.575329 0.614272 0.539773 -0.0176703
+-0.614272 0.575329 0.539773 0.0176703
+-0.650585 0.533921 0.537461 0.0529353
+-0.684112 0.490228 0.532848 0.0879736
+-0.71471 0.444435 0.525954 0.122635
+-0.742247 0.39674 0.516807 0.156772
+-0.766606 0.347345 0.505447 0.190237
+-0.787682 0.296463 0.491923 0.222887
+-0.805385 0.244311 0.476292 0.254583
+-0.81964 0.191113 0.458622 0.285189
+-0.830384 0.137097 0.438987 0.314574
+-0.837573 0.0824937 0.417473 0.342612
+-0.841175 0.0275373 0.394172 0.369182
+0.865562 0.0283356 -0.304381 -0.396677
+0.861855 0.0848853 -0.277785 -0.415735
+0.854458 0.141072 -0.25 -0.433013
+0.843402 0.196654 -0.221144 -0.448436
+0.828735 0.251394 -0.191342 -0.46194
+0.810518 0.305057 -0.16072 -0.473465
+0.788831 0.357415 -0.12941 -0.482963
+0.763766 0.408242 -0.0975452 -0.490393
+0.735431 0.45732 -0.0652631 -0.495722
+0.703946 0.50444 -0.0327016 -0.498929
+0.669447 0.549401 -9.06459e-09 -0.5
+0.632081 0.592008 0.0327016 -0.498929
+0.592008 0.632081 0.0652631 -0.495722
+0.549401 0.669447 0.0975452 -0.490393
+0.50444 0.703946 0.12941 -0.482963
+0.45732 0.735431 0.16072 -0.473465
+0.408241 0.763766 0.191342 -0.46194
+0.357415 0.788831 0.221144 -0.448436
+0.305057 0.810518 0.25 -0.433013
+0.251394 0.828735 0.277785 -0.415735
+0.196654 0.843402 0.304381 -0.396677
+0.141072 0.854458 0.329673 -0.37592
+0.0848852 0.861855 0.353553 -0.353553
+0.0283355 0.865562 0.37592 -0.329673
+-0.0283356 0.865562 0.396677 -0.304381
+-0.0848854 0.861855 0.415735 -0.277785
+-0.141072 0.854458 0.433013 -0.25
+-0.196654 0.843402 0.448436 -0.221144
+-0.251394 0.828735 0.46194 -0.191342
+-0.305058 0.810518 0.473465 -0.16072
+-0.357415 0.788831 0.482963 -0.129409
+-0.408241 0.763766 0.490393 -0.0975452
+-0.45732 0.735431 0.495722 -0.0652631
+-0.504441 0.703946 0.498929 -0.0327015
+-0.549401 0.669447 0.5 -1.70112e-08
+-0.592008 0.632081 0.498929 0.0327016
+-0.632081 0.592008 0.495722 0.0652631
+-0.669447 0.549401 0.490393 0.0975452
+-0.703946 0.504441 0.482963 0.129409
+-0.735431 0.45732 0.473465 0.16072
+-0.763766 0.408242 0.46194 0.191342
+-0.788831 0.357415 0.448436 0.221144
+-0.810518 0.305057 0.433013 0.25
+-0.828735 0.251394 0.415735 0.277785
+-0.843402 0.196654 0.396677 0.304381
+-0.854458 0.141072 0.37592 0.329673
+-0.861855 0.0848853 0.353553 0.353553
+-0.865562 0.0283356 0.329673 0.37592
+0.865562 0.0283356 -0.37592 -0.329673
+0.861855 0.0848853 -0.353553 -0.353553
+0.854458 0.141072 -0.329673 -0.37592
+0.843402 0.196654 -0.304381 -0.396677
+0.828735 0.251394 -0.277785 -0.415735
+0.810518 0.305057 -0.25 -0.433013
+0.788831 0.357415 -0.221144 -0.448436
+0.763766 0.408242 -0.191342 -0.46194
+0.735431 0.45732 -0.16072 -0.473465
+0.703946 0.50444 -0.12941 -0.482963
+0.669447 0.549401 -0.0975452 -0.490393
+0.632081 0.592008 -0.0652631 -0.495722
+0.592008 0.632081 -0.0327016 -0.498929
+0.549401 0.669447 1.11796e-09 -0.5
+0.50444 0.703946 0.0327016 -0.498929
+0.45732 0.735431 0.0652631 -0.495722
+0.408241 0.763766 0.0975452 -0.490393
+0.357415 0.788831 0.12941 -0.482963
+0.305057 0.810518 0.16072 -0.473465
+0.251394 0.828735 0.191342 -0.46194
+0.196654 0.843402 0.221144 -0.448436
+0.141072 0.854458 0.25 -0.433013
+0.0848852 0.861855 0.277785 -0.415735
+0.0283355 0.865562 0.304381 -0.396677
+-0.0283356 0.865562 0.329673 -0.37592
+-0.0848854 0.861855 0.353553 -0.353553
+-0.141072 0.854458 0.37592 -0.329673
+-0.196654 0.843402 0.396677 -0.304381
+-0.251394 0.828735 0.415735 -0.277785
+-0.305058 0.810518 0.433013 -0.25
+-0.357415 0.788831 0.448436 -0.221144
+-0.408241 0.763766 0.46194 -0.191342
+-0.45732 0.735431 0.473465 -0.16072
+-0.504441 0.703946 0.482963 -0.129409
+-0.549401 0.669447 0.490393 -0.0975452
+-0.592008 0.632081 0.495722 -0.0652631
+-0.632081 0.592008 0.498929 -0.0327016
+-0.669447 0.549401 0.5 2.29737e-08
+-0.703946 0.504441 0.498929 0.0327015
+-0.735431 0.45732 0.495722 0.0652631
+-0.763766 0.408242 0.490393 0.0975451
+-0.788831 0.357415 0.482963 0.12941
+-0.810518 0.305057 0.473465 0.16072
+-0.828735 0.251394 0.46194 0.191342
+-0.843402 0.196654 0.448436 0.221144
+-0.854458 0.141072 0.433013 0.25
+-0.861855 0.0848853 0.415735 0.277785
+-0.865562 0.0283356 0.396677 0.304381
+0.88928 0.029112 -0.312016 -0.333136
+0.885472 0.0872114 -0.28956 -0.352829
+0.877872 0.144937 -0.265863 -0.371012
+0.866513 0.202043 -0.241029 -0.387606
+0.851444 0.258283 -0.215162 -0.40254
+0.832728 0.313417 -0.188374 -0.415751
+0.810447 0.367209 -0.160779 -0.427181
+0.784695 0.419428 -0.132496 -0.436782
+0.755583 0.469852 -0.103646 -0.444512
+0.723236 0.518263 -0.0743513 -0.450339
+0.687791 0.564456 -0.0447385 -0.454238
+0.649401 0.608231 -0.0149341 -0.456191
+0.608231 0.649401 0.0149342 -0.456191
+0.564456 0.687791 0.0447385 -0.454238
+0.518263 0.723236 0.0743513 -0.450339
+0.469852 0.755583 0.103646 -0.444512
+0.419428 0.784695 0.132496 -0.436781
+0.367209 0.810447 0.160779 -0.427181
+0.313417 0.832728 0.188374 -0.415751
+0.258283 0.851444 0.215162 -0.40254
+0.202043 0.866513 0.241029 -0.387606
+0.144937 0.877872 0.265864 -0.371012
+0.0872113 0.885472 0.28956 -0.352829
+0.0291119 0.88928 0.312016 -0.333136
+-0.0291121 0.88928 0.333136 -0.312016
+-0.0872115 0.885472 0.352829 -0.28956
+-0.144937 0.877872 0.371012 -0.265863
+-0.202043 0.866513 0.387606 -0.241029
+-0.258283 0.851444 0.40254 -0.215162
+-0.313417 0.832728 0.415751 -0.188374
+-0.367209 0.810447 0.427181 -0.160779
+-0.419428 0.784695 0.436781 -0.132496
+-0.469852 0.755583 0.444512 -0.103646
+-0.518263 0.723236 0.450339 -0.0743512
+-0.564456 0.687791 0.454238 -0.0447385
+-0.608231 0.649401 0.456191 -0.0149341
+-0.649401 0.608231 0.456191 0.0149341
+-0.687791 0.564456 0.454238 0.0447385
+-0.723236 0.518263 0.450339 0.0743512
+-0.755583 0.469852 0.444512 0.103646
+-0.784695 0.419428 0.436782 0.132496
+-0.810447 0.367209 0.427181 0.160779
+-0.832728 0.313417 0.415751 0.188374
+-0.851444 0.258283 0.40254 0.215162
+-0.866513 0.202043 0.387606 0.241029
+-0.877872 0.144937 0.371012 0.265863
+-0.885472 0.0872113 0.352829 0.28956
+-0.88928 0.0291121 0.333136 0.312016
+0.841175 0.0275372 -0.190237 -0.505447
+0.837573 0.0824937 -0.156772 -0.516807
+0.830384 0.137097 -0.122635 -0.525954
+0.81964 0.191113 -0.0879736 -0.532848
+0.805385 0.244311 -0.0529353 -0.537461
+0.787682 0.296463 -0.0176703 -0.539773
+0.766606 0.347345 0.0176703 -0.539773
+0.742247 0.396739 0.0529353 -0.537461
+0.71471 0.444435 0.0879736 -0.532848
+0.684112 0.490228 0.122635 -0.525954
+0.650585 0.533921 0.156772 -0.516807
+0.614272 0.575329 0.190237 -0.505447
+0.575329 0.614272 0.222887 -0.491923
+0.533922 0.650585 0.254583 -0.476292
+0.490228 0.684112 0.285189 -0.458622
+0.444435 0.71471 0.314574 -0.438987
+0.396739 0.742247 0.342612 -0.417473
+0.347345 0.766606 0.369182 -0.394172
+0.296463 0.787682 0.394172 -0.369182
+0.244311 0.805385 0.417473 -0.342612
+0.191113 0.81964 0.438987 -0.314574
+0.137097 0.830384 0.458622 -0.285189
+0.0824936 0.837573 0.476292 -0.254583
+0.0275371 0.841175 0.491923 -0.222887
+-0.0275373 0.841175 0.505447 -0.190237
+-0.0824938 0.837573 0.516807 -0.156772
+-0.137097 0.830384 0.525954 -0.122635
+-0.191113 0.81964 0.532848 -0.0879736
+-0.244311 0.805385 0.537461 -0.0529352
+-0.296463 0.787682 0.539773 -0.0176702
+-0.347345 0.766606 0.539773 0.0176704
+-0.396739 0.742247 0.537461 0.0529352
+-0.444435 0.71471 0.532848 0.0879736
+-0.490228 0.684112 0.525954 0.122635
+-0.533921 0.650585 0.516807 0.156772
+-0.575329 0.614272 0.505447 0.190237
+-0.614272 0.575329 0.491923 0.222887
+-0.650585 0.533921 0.476292 0.254583
+-0.684112 0.490228 0.458622 0.285189
+-0.71471 0.444435 0.438987 0.314574
+-0.742247 0.39674 0.417473 0.342611
+-0.766606 0.347345 0.394172 0.369182
+-0.787682 0.296463 0.369182 0.394172
+-0.805385 0.244311 0.342612 0.417473
+-0.81964 0.191113 0.314574 0.438987
+-0.830384 0.137097 0.285189 0.458622
+-0.837573 0.0824937 0.254583 0.476292
+-0.841175 0.0275373 0.222887 0.491923
+0.865562 0.0283356 -0.12941 -0.482963
+0.861855 0.0848853 -0.0975452 -0.490393
+0.854458 0.141072 -0.0652631 -0.495722
+0.843402 0.196654 -0.0327016 -0.498929
+0.828735 0.251394 3.72652e-10 -0.5
+0.810518 0.305057 0.0327016 -0.498929
+0.788831 0.357415 0.0652631 -0.495722
+0.763766 0.408242 0.0975452 -0.490393
+0.735431 0.45732 0.12941 -0.482963
+0.703946 0.50444 0.16072 -0.473465
+0.669447 0.549401 0.191342 -0.46194
+0.632081 0.592008 0.221144 -0.448436
+0.592008 0.632081 0.25 -0.433013
+0.549401 0.669447 0.277785 -0.415735
+0.50444 0.703946 0.304381 -0.396677
+0.45732 0.735431 0.329673 -0.37592
+0.408241 0.763766 0.353553 -0.353553
+0.357415 0.788831 0.37592 -0.329673
+0.305057 0.810518 0.396677 -0.304381
+0.251394 0.828735 0.415735 -0.277785
+0.196654 0.843402 0.433013 -0.25
+0.141072 0.854458 0.448436 -0.221144
+0.0848852 0.861855 0.46194 -0.191342
+0.0283355 0.865562 0.473465 -0.16072
+-0.0283356 0.865562 0.482963 -0.129409
+-0.0848854 0.861855 0.490393 -0.0975451
+-0.141072 0.854458 0.495722 -0.0652631
+-0.196654 0.843402 0.498929 -0.0327015
+-0.251394 0.828735 0.5 8.1833e-08
+-0.305058 0.810518 0.498929 0.0327016
+-0.357415 0.788831 0.495722 0.0652632
+-0.408241 0.763766 0.490393 0.0975451
+-0.45732 0.735431 0.482963 0.12941
+-0.504441 0.703946 0.473465 0.16072
+-0.549401 0.669447 0.46194 0.191342
+-0.592008 0.632081 0.448436 0.221144
+-0.632081 0.592008 0.433013 0.25
+-0.669447 0.549401 0.415735 0.277785
+-0.703946 0.504441 0.396677 0.304381
+-0.735431 0.45732 0.37592 0.329673
+-0.763766 0.408242 0.353553 0.353553
+-0.788831 0.357415 0.329673 0.37592
+-0.810518 0.305057 0.304381 0.396677
+-0.828735 0.251394 0.277785 0.415735
+-0.843402 0.196654 0.25 0.433013
+-0.854458 0.141072 0.221144 0.448436
+-0.861855 0.0848853 0.191342 0.46194
+-0.865562 0.0283356 0.16072 0.473465
+0.865562 0.0283356 -0.221144 -0.448436
+0.861855 0.0848853 -0.191342 -0.46194
+0.854458 0.141072 -0.16072 -0.473465
+0.843402 0.196654 -0.12941 -0.482963
+0.828735 0.251394 -0.0975452 -0.490393
+0.810518 0.305057 -0.0652631 -0.495722
+0.788831 0.357415 -0.0327016 -0.498929
+0.763766 0.408242 1.05552e-08 -0.5
+0.735431 0.45732 0.0327016 -0.498929
+0.703946 0.50444 0.0652631 -0.495722
+0.669447 0.549401 0.0975452 -0.490393
+0.632081 0.592008 0.12941 -0.482963
+0.592008 0.632081 0.16072 -0.473465
+0.549401 0.669447 0.191342 -0.46194
+0.50444 0.703946 0.221144 -0.448436
+0.45732 0.735431 0.25 -0.433013
+0.408241 0.763766 0.277785 -0.415735
+0.357415 0.788831 0.304381 -0.396677
+0.305057 0.810518 0.329673 -0.37592
+0.251394 0.828735 0.353553 -0.353553
+0.196654 0.843402 0.37592 -0.329673
+0.141072 0.854458 0.396677 -0.304381
+0.0848852 0.861855 0.415735 -0.277785
+0.0283355 0.865562 0.433013 -0.25
+-0.0283356 0.865562 0.448436 -0.221144
+-0.0848854 0.861855 0.46194 -0.191342
+-0.141072 0.854458 0.473465 -0.16072
+-0.196654 0.843402 0.482963 -0.129409
+-0.251394 0.828735 0.490393 -0.0975451
+-0.305058 0.810518 0.495722 -0.065263
+-0.357415 0.788831 0.498929 -0.0327015
+-0.408241 0.763766 0.5 -5.69961e-08
+-0.45732 0.735431 0.498929 0.0327016
+-0.504441 0.703946 0.495722 0.0652631
+-0.549401 0.669447 0.490393 0.0975451
+-0.592008 0.632081 0.482963 0.12941
+-0.632081 0.592008 0.473465 0.16072
+-0.669447 0.549401 0.46194 0.191342
+-0.703946 0.504441 0.448436 0.221144
+-0.735431 0.45732 0.433013 0.25
+-0.763766 0.408242 0.415735 0.277785
+-0.788831 0.357415 0.396677 0.304381
+-0.810518 0.305057 0.37592 0.329673
+-0.828735 0.251394 0.353553 0.353553
+-0.843402 0.196654 0.329673 0.37592
+-0.854458 0.141072 0.304381 0.396677
+-0.861855 0.0848853 0.277785 0.415735
+-0.865562 0.0283356 0.25 0.433013
+0.88928 0.029112 -0.160779 -0.427181
+0.885472 0.0872114 -0.132496 -0.436782
+0.877872 0.144937 -0.103646 -0.444512
+0.866513 0.202043 -0.0743513 -0.450339
+0.851444 0.258283 -0.0447385 -0.454238
+0.832728 0.313417 -0.0149342 -0.456191
+0.810447 0.367209 0.0149342 -0.456191
+0.784695 0.419428 0.0447385 -0.454238
+0.755583 0.469852 0.0743513 -0.450339
+0.723236 0.518263 0.103646 -0.444512
+0.687791 0.564456 0.132496 -0.436782
+0.649401 0.608231 0.160779 -0.427181
+0.608231 0.649401 0.188374 -0.415751
+0.564456 0.687791 0.215162 -0.40254
+0.518263 0.723236 0.241029 -0.387606
+0.469852 0.755583 0.265864 -0.371012
+0.419428 0.784695 0.28956 -0.352829
+0.367209 0.810447 0.312016 -0.333136
+0.313417 0.832728 0.333136 -0.312016
+0.258283 0.851444 0.352829 -0.28956
+0.202043 0.866513 0.371012 -0.265863
+0.144937 0.877872 0.387606 -0.241029
+0.0872113 0.885472 0.40254 -0.215162
+0.0291119 0.88928 0.415751 -0.188374
+-0.0291121 0.88928 0.427181 -0.160779
+-0.0872115 0.885472 0.436782 -0.132496
+-0.144937 0.877872 0.444512 -0.103646
+-0.202043 0.866513 0.450339 -0.0743512
+-0.258283 0.851444 0.454238 -0.0447384
+-0.313417 0.832728 0.456191 -0.0149341
+-0.367209 0.810447 0.456191 0.0149342
+-0.419428 0.784695 0.454238 0.0447384
+-0.469852 0.755583 0.450339 0.0743513
+-0.518263 0.723236 0.444512 0.103646
+-0.564456 0.687791 0.436782 0.132496
+-0.608231 0.649401 0.427181 0.160779
+-0.649401 0.608231 0.415751 0.188374
+-0.687791 0.564456 0.40254 0.215162
+-0.723236 0.518263 0.387606 0.241029
+-0.755583 0.469852 0.371012 0.265863
+-0.784695 0.419428 0.352829 0.28956
+-0.810447 0.367209 0.333136 0.312016
+-0.832728 0.313417 0.312016 0.333136
+-0.851444 0.258283 0.28956 0.352829
+-0.866513 0.202043 0.265863 0.371012
+-0.877872 0.144937 0.241029 0.387606
+-0.885472 0.0872113 0.215162 0.40254
+-0.88928 0.0291121 0.188374 0.415751
+0.88928 0.029112 -0.0743513 -0.450339
+0.885472 0.0872114 -0.0447385 -0.454238
+0.877872 0.144937 -0.0149342 -0.456191
+0.866513 0.202043 0.0149342 -0.456191
+0.851444 0.258283 0.0447385 -0.454238
+0.832728 0.313417 0.0743513 -0.450339
+0.810447 0.367209 0.103646 -0.444512
+0.784695 0.419428 0.132496 -0.436782
+0.755583 0.469852 0.160779 -0.427181
+0.723236 0.518263 0.188374 -0.415751
+0.687791 0.564456 0.215162 -0.40254
+0.649401 0.608231 0.241029 -0.387606
+0.608231 0.649401 0.265863 -0.371012
+0.564456 0.687791 0.28956 -0.352829
+0.518263 0.723236 0.312016 -0.333136
+0.469852 0.755583 0.333136 -0.312016
+0.419428 0.784695 0.352829 -0.28956
+0.367209 0.810447 0.371012 -0.265863
+0.313417 0.832728 0.387606 -0.241029
+0.258283 0.851444 0.40254 -0.215162
+0.202043 0.866513 0.415751 -0.188374
+0.144937 0.877872 0.427181 -0.160779
+0.0872113 0.885472 0.436782 -0.132496
+0.0291119 0.88928 0.444512 -0.103646
+-0.0291121 0.88928 0.450339 -0.0743512
+-0.0872115 0.885472 0.454238 -0.0447385
+-0.144937 0.877872 0.456191 -0.0149341
+-0.202043 0.866513 0.456191 0.0149342
+-0.258283 0.851444 0.454238 0.0447386
+-0.313417 0.832728 0.450339 0.0743513
+-0.367209 0.810447 0.444512 0.103646
+-0.419428 0.784695 0.436782 0.132496
+-0.469852 0.755583 0.427181 0.160779
+-0.518263 0.723236 0.415751 0.188374
+-0.564456 0.687791 0.40254 0.215162
+-0.608231 0.649401 0.387606 0.241029
+-0.649401 0.608231 0.371012 0.265863
+-0.687791 0.564456 0.352829 0.28956
+-0.723236 0.518263 0.333136 0.312016
+-0.755583 0.469852 0.312016 0.333136
+-0.784695 0.419428 0.28956 0.352829
+-0.810447 0.367209 0.265863 0.371012
+-0.832728 0.313417 0.241029 0.387606
+-0.851444 0.258283 0.215162 0.40254
+-0.866513 0.202043 0.188374 0.415751
+-0.877872 0.144937 0.160779 0.427181
+-0.885472 0.0872113 0.132496 0.436782
+-0.88928 0.0291121 0.103646 0.444512
+0.912382 0.0298683 -0.0267007 -0.407374
+0.908475 0.089477 1.11532e-09 -0.408248
+0.900678 0.148703 0.0267007 -0.407374
+0.889024 0.207291 0.0532871 -0.404756
+0.873563 0.264992 0.0796453 -0.400404
+0.854361 0.321559 0.105662 -0.394338
+0.831501 0.376748 0.131227 -0.386583
+0.80508 0.430324 0.15623 -0.377172
+0.775212 0.482058 0.180564 -0.366147
+0.742024 0.531727 0.204124 -0.353553
+0.705659 0.579119 0.226811 -0.339446
+0.666272 0.624032 0.248526 -0.323885
+0.624032 0.666272 0.269177 -0.306937
+0.579119 0.705659 0.288675 -0.288675
+0.531727 0.742024 0.306937 -0.269177
+0.482058 0.775212 0.323885 -0.248526
+0.430324 0.80508 0.339446 -0.226811
+0.376748 0.831501 0.353553 -0.204124
+0.321559 0.854361 0.366147 -0.180564
+0.264992 0.873563 0.377172 -0.15623
+0.207291 0.889024 0.386583 -0.131227
+0.148702 0.900678 0.394338 -0.105662
+0.0894769 0.908475 0.400404 -0.0796452
+0.0298682 0.912382 0.404756 -0.0532871
+-0.0298684 0.912382 0.407374 -0.0267007
+-0.0894771 0.908475 0.408248 3.41689e-08
+-0.148703 0.900678 0.407374 0.0267007
+-0.207291 0.889024 0.404756 0.0532871
+-0.264993 0.873563 0.400404 0.0796454
+-0.321559 0.854361 0.394338 0.105662
+-0.376748 0.831501 0.386583 0.131227
+-0.430324 0.80508 0.377172 0.15623
+-0.482058 0.775212 0.366147 0.180564
+-0.531727 0.742024 0.353553 0.204124
+-0.579119 0.705659 0.339446 0.226811
+-0.624032 0.666272 0.323885 0.248526
+-0.666272 0.624032 0.306937 0.269177
+-0.705659 0.579119 0.288675 0.288675
+-0.742024 0.531727 0.269177 0.306937
+-0.775212 0.482058 0.248526 0.323885
+-0.80508 0.430324 0.226811 0.339446
+-0.831501 0.376748 0.204124 0.353553
+-0.854361 0.321559 0.180564 0.366147
+-0.873563 0.264992 0.15623 0.377172
+-0.889024 0.207291 0.131227 0.386583
+-0.900678 0.148703 0.105662 0.394338
+-0.908475 0.089477 0.0796453 0.400404
+-0.912382 0.0298684 0.0532871 0.404756
+0.912382 0.0298683 -0.105662 -0.394338
+0.908475 0.089477 -0.0796453 -0.400404
+0.900678 0.148703 -0.0532871 -0.404756
+0.889024 0.207291 -0.0267007 -0.407374
+0.873563 0.264992 3.04269e-10 -0.408248
+0.854361 0.321559 0.0267007 -0.407374
+0.831501 0.376748 0.0532871 -0.404756
+0.80508 0.430324 0.0796453 -0.400404
+0.775212 0.482058 0.105662 -0.394338
+0.742024 0.531727 0.131227 -0.386583
+0.705659 0.579119 0.15623 -0.377172
+0.666272 0.624032 0.180564 -0.366147
+0.624032 0.666272 0.204124 -0.353553
+0.579119 0.705659 0.226811 -0.339446
+0.531727 0.742024 0.248526 -0.323885
+0.482058 0.775212 0.269177 -0.306937
+0.430324 0.80508 0.288675 -0.288675
+0.376748 0.831501 0.306937 -0.269177
+0.321559 0.854361 0.323885 -0.248526
+0.264992 0.873563 0.339446 -0.226811
+0.207291 0.889024 0.353553 -0.204124
+0.148702 0.900678 0.366147 -0.180564
+0.0894769 0.908475 0.377172 -0.15623
+0.0298682 0.912382 0.386583 -0.131227
+-0.0298684 0.912382 0.394338 -0.105662
+-0.0894771 0.908475 0.400404 -0.0796453
+-0.148703 0.900678 0.404756 -0.0532871
+-0.207291 0.889024 0.407374 -0.0267007
+-0.264993 0.873563 0.408248 6.68164e-08
+-0.321559 0.854361 0.407374 0.0267008
+-0.376748 0.831501 0.404756 0.0532872
+-0.430324 0.80508 0.400404 0.0796452
+-0.482058 0.775212 0.394338 0.105662
+-0.531727 0.742024 0.386583 0.131227
+-0.579119 0.705659 0.377172 0.15623
+-0.624032 0.666272 0.366147 0.180564
+-0.666272 0.624032 0.353553 0.204124
+-0.705659 0.579119 0.339446 0.226811
+-0.742024 0.531727 0.323885 0.248526
+-0.775212 0.482058 0.306937 0.269177
+-0.80508 0.430324 0.288675 0.288675
+-0.831501 0.376748 0.269177 0.306937
+-0.854361 0.321559 0.248526 0.323885
+-0.873563 0.264992 0.226811 0.339446
+-0.889024 0.207291 0.204124 0.353553
+-0.900678 0.148703 0.180564 0.366147
+-0.908475 0.089477 0.15623 0.377172
+-0.912382 0.0298684 0.131227 0.386583
+0.933521 0.0305603 -0.0283599 -0.35609
+0.929524 0.0915501 -0.0050098 -0.357182
+0.921546 0.152148 0.0183618 -0.356745
+0.909622 0.212094 0.0416547 -0.35478
+0.893803 0.271132 0.0647692 -0.351296
+0.874156 0.329009 0.0876064 -0.346308
+0.850766 0.385477 0.110069 -0.339837
+0.823733 0.440295 0.132059 -0.33191
+0.793173 0.493227 0.153484 -0.322563
+0.759216 0.544047 0.174252 -0.311834
+0.722008 0.592537 0.194274 -0.299769
+0.681709 0.63849 0.213464 -0.286421
+0.63849 0.681709 0.23174 -0.271847
+0.592537 0.722008 0.249023 -0.256108
+0.544047 0.759216 0.265241 -0.239273
+0.493227 0.793173 0.280322 -0.221413
+0.440295 0.823733 0.294203 -0.202605
+0.385477 0.850766 0.306824 -0.18293
+0.329009 0.874156 0.318131 -0.162471
+0.271132 0.893803 0.328076 -0.141316
+0.212094 0.909622 0.336616 -0.119556
+0.152148 0.921546 0.343715 -0.0972846
+0.09155 0.929524 0.349342 -0.0745963
+0.0305602 0.933521 0.353472 -0.0515885
+-0.0305604 0.933521 0.35609 -0.0283599
+-0.0915502 0.929524 0.357182 -0.00500977
+-0.152148 0.921546 0.356745 0.0183618
+-0.212094 0.909622 0.35478 0.0416547
+-0.271132 0.893803 0.351296 0.0647693
+-0.329009 0.874156 0.346308 0.0876065
+-0.385477 0.850766 0.339837 0.110069
+-0.440295 0.823733 0.33191 0.132059
+-0.493227 0.793173 0.322563 0.153484
+-0.544047 0.759216 0.311834 0.174252
+-0.592537 0.722008 0.299769 0.194274
+-0.63849 0.681709 0.286421 0.213464
+-0.681709 0.63849 0.271847 0.23174
+-0.722008 0.592537 0.256108 0.249023
+-0.759216 0.544047 0.239273 0.265241
+-0.793173 0.493227 0.221413 0.280322
+-0.823733 0.440295 0.202605 0.294203
+-0.850766 0.385477 0.18293 0.306824
+-0.874156 0.329009 0.162471 0.318131
+-0.893803 0.271132 0.141316 0.328076
+-0.909622 0.212094 0.119556 0.336616
+-0.921546 0.152148 0.0972846 0.343715
+-0.929524 0.0915501 0.0745963 0.349342
+-0.933521 0.0305604 0.0515886 0.353472
+0.88928 0.029112 -0.241029 -0.387606
+0.885472 0.0872114 -0.215162 -0.40254
+0.877872 0.144937 -0.188374 -0.415751
+0.866513 0.202043 -0.160779 -0.427181
+0.851444 0.258283 -0.132496 -0.436782
+0.832728 0.313417 -0.103646 -0.444512
+0.810447 0.367209 -0.0743513 -0.450339
+0.784695 0.419428 -0.0447385 -0.454238
+0.755583 0.469852 -0.0149341 -0.456191
+0.723236 0.518263 0.0149341 -0.456191
+0.687791 0.564456 0.0447385 -0.454238
+0.649401 0.608231 0.0743513 -0.450339
+0.608231 0.649401 0.103646 -0.444512
+0.564456 0.687791 0.132496 -0.436782
+0.518263 0.723236 0.160779 -0.427181
+0.469852 0.755583 0.188374 -0.415751
+0.419428 0.784695 0.215162 -0.40254
+0.367209 0.810447 0.241029 -0.387606
+0.313417 0.832728 0.265863 -0.371012
+0.258283 0.851444 0.28956 -0.352829
+0.202043 0.866513 0.312016 -0.333136
+0.144937 0.877872 0.333136 -0.312016
+0.0872113 0.885472 0.352829 -0.28956
+0.0291119 0.88928 0.371012 -0.265863
+-0.0291121 0.88928 0.387606 -0.241029
+-0.0872115 0.885472 0.40254 -0.215162
+-0.144937 0.877872 0.415751 -0.188374
+-0.202043 0.866513 0.427181 -0.160779
+-0.258283 0.851444 0.436782 -0.132496
+-0.313417 0.832728 0.444512 -0.103646
+-0.367209 0.810447 0.450339 -0.0743512
+-0.419428 0.784695 0.454238 -0.0447386
+-0.469852 0.755583 0.456191 -0.0149342
+-0.518263 0.723236 0.456191 0.0149342
+-0.564456 0.687791 0.454238 0.0447385
+-0.608231 0.649401 0.450339 0.0743513
+-0.649401 0.608231 0.444512 0.103646
+-0.687791 0.564456 0.436782 0.132496
+-0.723236 0.518263 0.427181 0.160779
+-0.755583 0.469852 0.415751 0.188374
+-0.784695 0.419428 0.40254 0.215162
+-0.810447 0.367209 0.387606 0.241029
+-0.832728 0.313417 0.371012 0.265864
+-0.851444 0.258283 0.352829 0.28956
+-0.866513 0.202043 0.333136 0.312016
+-0.877872 0.144937 0.312016 0.333136
+-0.885472 0.0872113 0.28956 0.352829
+-0.88928 0.0291121 0.265864 0.371012
+0.912382 0.0298683 -0.180564 -0.366147
+0.908475 0.089477 -0.15623 -0.377172
+0.900678 0.148703 -0.131227 -0.386583
+0.889024 0.207291 -0.105662 -0.394338
+0.873563 0.264992 -0.0796453 -0.400404
+0.854361 0.321559 -0.0532871 -0.404756
+0.831501 0.376748 -0.0267007 -0.407374
+0.80508 0.430324 8.61828e-09 -0.408248
+0.775212 0.482058 0.0267007 -0.407374
+0.742024 0.531727 0.0532871 -0.404756
+0.705659 0.579119 0.0796453 -0.400404
+0.666272 0.624032 0.105662 -0.394338
+0.624032 0.666272 0.131227 -0.386583
+0.579119 0.705659 0.15623 -0.377172
+0.531727 0.742024 0.180564 -0.366147
+0.482058 0.775212 0.204124 -0.353553
+0.430324 0.80508 0.226811 -0.339446
+0.376748 0.831501 0.248526 -0.323885
+0.321559 0.854361 0.269177 -0.306937
+0.264992 0.873563 0.288675 -0.288675
+0.207291 0.889024 0.306937 -0.269177
+0.148702 0.900678 0.323885 -0.248526
+0.0894769 0.908475 0.339446 -0.226811
+0.0298682 0.912382 0.353553 -0.204124
+-0.0298684 0.912382 0.366147 -0.180564
+-0.0894771 0.908475 0.377172 -0.15623
+-0.148703 0.900678 0.386583 -0.131227
+-0.207291 0.889024 0.394338 -0.105662
+-0.264993 0.873563 0.400404 -0.0796452
+-0.321559 0.854361 0.404756 -0.053287
+-0.376748 0.831501 0.407374 -0.0267007
+-0.430324 0.80508 0.408248 -4.65371e-08
+-0.482058 0.775212 0.407374 0.0267007
+-0.531727 0.742024 0.404756 0.0532871
+-0.579119 0.705659 0.400404 0.0796453
+-0.624032 0.666272 0.394338 0.105662
+-0.666272 0.624032 0.386583 0.131227
+-0.705659 0.579119 0.377172 0.15623
+-0.742024 0.531727 0.366147 0.180564
+-0.775212 0.482058 0.353553 0.204124
+-0.80508 0.430324 0.339446 0.226811
+-0.831501 0.376748 0.323885 0.248526
+-0.854361 0.321559 0.306937 0.269177
+-0.873563 0.264992 0.288675 0.288675
+-0.889024 0.207291 0.269177 0.306937
+-0.900678 0.148703 0.248526 0.323885
+-0.908475 0.089477 0.226811 0.339446
+-0.912382 0.0298684 0.204124 0.353553
+0.912382 0.0298683 -0.248526 -0.323885
+0.908475 0.089477 -0.226811 -0.339446
+0.900678 0.148703 -0.204124 -0.353553
+0.889024 0.207291 -0.180564 -0.366147
+0.873563 0.264992 -0.15623 -0.377172
+0.854361 0.321559 -0.131227 -0.386583
+0.831501 0.376748 -0.105662 -0.394338
+0.80508 0.430324 -0.0796453 -0.400404
+0.775212 0.482058 -0.0532871 -0.404756
+0.742024 0.531727 -0.0267007 -0.407374
+0.705659 0.579119 -7.4012e-09 -0.408248
+0.666272 0.624032 0.0267007 -0.407374
+0.624032 0.666272 0.0532871 -0.404756
+0.579119 0.705659 0.0796453 -0.400404
+0.531727 0.742024 0.105662 -0.394338
+0.482058 0.775212 0.131227 -0.386583
+0.430324 0.80508 0.15623 -0.377172
+0.376748 0.831501 0.180564 -0.366147
+0.321559 0.854361 0.204124 -0.353553
+0.264992 0.873563 0.226811 -0.339446
+0.207291 0.889024 0.248526 -0.323885
+0.148702 0.900678 0.269177 -0.306937
+0.0894769 0.908475 0.288675 -0.288675
+0.0298682 0.912382 0.306937 -0.269177
+-0.0298684 0.912382 0.323885 -0.248526
+-0.0894771 0.908475 0.339446 -0.226811
+-0.148703 0.900678 0.353553 -0.204124
+-0.207291 0.889024 0.366147 -0.180564
+-0.264993 0.873563 0.377172 -0.15623
+-0.321559 0.854361 0.386583 -0.131227
+-0.376748 0.831501 0.394338 -0.105662
+-0.430324 0.80508 0.400404 -0.0796453
+-0.482058 0.775212 0.404756 -0.0532871
+-0.531727 0.742024 0.407374 -0.0267007
+-0.579119 0.705659 0.408248 -1.38896e-08
+-0.624032 0.666272 0.407374 0.0267007
+-0.666272 0.624032 0.404756 0.0532871
+-0.705659 0.579119 0.400404 0.0796453
+-0.742024 0.531727 0.394338 0.105662
+-0.775212 0.482058 0.386583 0.131227
+-0.80508 0.430324 0.377172 0.15623
+-0.831501 0.376748 0.366147 0.180564
+-0.854361 0.321559 0.353553 0.204124
+-0.873563 0.264992 0.339446 0.226811
+-0.889024 0.207291 0.323885 0.248526
+-0.900678 0.148703 0.306937 0.269177
+-0.908475 0.089477 0.288675 0.288675
+-0.912382 0.0298684 0.269177 0.306937
+0.933521 0.0305603 -0.180053 -0.308521
+0.929524 0.0915501 -0.159489 -0.319636
+0.921546 0.152148 -0.138242 -0.329383
+0.909622 0.212094 -0.116404 -0.337719
+0.893803 0.271132 -0.0940667 -0.344609
+0.874156 0.329009 -0.0713268 -0.350024
+0.850766 0.385477 -0.0482814 -0.353939
+0.823733 0.440295 -0.0250293 -0.356339
+0.793173 0.493227 -0.00166998 -0.357213
+0.759216 0.544047 0.0216965 -0.356558
+0.722008 0.592537 0.04497 -0.354375
+0.681709 0.63849 0.068051 -0.350675
+0.63849 0.681709 0.0908405 -0.345474
+0.592537 0.722008 0.113241 -0.338793
+0.544047 0.759216 0.135157 -0.330661
+0.493227 0.793173 0.156494 -0.321114
+0.440295 0.823733 0.17716 -0.310191
+0.385477 0.850766 0.197069 -0.29794
+0.329009 0.874156 0.216133 -0.284413
+0.271132 0.893803 0.234272 -0.269668
+0.212094 0.909622 0.251407 -0.253769
+0.152148 0.921546 0.267466 -0.236783
+0.09155 0.929524 0.28238 -0.218783
+0.0305602 0.933521 0.296084 -0.199846
+-0.0305604 0.933521 0.308521 -0.180053
+-0.0915502 0.929524 0.319636 -0.159489
+-0.152148 0.921546 0.329383 -0.138242
+-0.212094 0.909622 0.337719 -0.116404
+-0.271132 0.893803 0.344609 -0.0940666
+-0.329009 0.874156 0.350024 -0.0713267
+-0.385477 0.850766 0.353939 -0.0482813
+-0.440295 0.823733 0.356339 -0.0250293
+-0.493227 0.793173 0.357213 -0.00166998
+-0.544047 0.759216 0.356558 0.0216965
+-0.592537 0.722008 0.354375 0.04497
+-0.63849 0.681709 0.350675 0.068051
+-0.681709 0.63849 0.345474 0.0908405
+-0.722008 0.592537 0.338793 0.113241
+-0.759216 0.544047 0.330661 0.135157
+-0.793173 0.493227 0.321114 0.156494
+-0.823733 0.440295 0.310191 0.17716
+-0.850766 0.385477 0.29794 0.197069
+-0.874156 0.329009 0.284413 0.216133
+-0.893803 0.271132 0.269668 0.234272
+-0.909622 0.212094 0.253769 0.251407
+-0.921546 0.152148 0.236783 0.267466
+-0.929524 0.0915501 0.218783 0.28238
+-0.933521 0.0305604 0.199846 0.296084
+0.933521 0.0305603 -0.106886 -0.340851
+0.929524 0.0915501 -0.0843647 -0.347112
+0.921546 0.152148 -0.0614818 -0.351887
+0.909622 0.212094 -0.0383357 -0.355154
+0.893803 0.271132 -0.0150254 -0.356901
+0.874156 0.329009 0.00834917 -0.35712
+0.850766 0.385477 0.031688 -0.355809
+0.823733 0.440295 0.0548912 -0.352975
+0.793173 0.493227 0.0778593 -0.348629
+0.759216 0.544047 0.100494 -0.34279
+0.722008 0.592537 0.122698 -0.335484
+0.681709 0.63849 0.144377 -0.32674
+0.63849 0.681709 0.165438 -0.316598
+0.592537 0.722008 0.18579 -0.3051
+0.544047 0.759216 0.205347 -0.292296
+0.493227 0.793173 0.224025 -0.278239
+0.440295 0.823733 0.241743 -0.262992
+0.385477 0.850766 0.258426 -0.246618
+0.329009 0.874156 0.274002 -0.229188
+0.271132 0.893803 0.288405 -0.210777
+0.212094 0.909622 0.301573 -0.191463
+0.152148 0.921546 0.313449 -0.171329
+0.09155 0.929524 0.323984 -0.150462
+0.0305602 0.933521 0.333131 -0.12895
+-0.0305604 0.933521 0.340851 -0.106886
+-0.0915502 0.929524 0.347112 -0.0843647
+-0.152148 0.921546 0.351887 -0.0614818
+-0.212094 0.909622 0.355154 -0.0383357
+-0.271132 0.893803 0.356901 -0.0150254
+-0.329009 0.874156 0.35712 0.00834923
+-0.385477 0.850766 0.355809 0.0316881
+-0.440295 0.823733 0.352975 0.0548912
+-0.493227 0.793173 0.348629 0.0778593
+-0.544047 0.759216 0.34279 0.100494
+-0.592537 0.722008 0.335484 0.122698
+-0.63849 0.681709 0.32674 0.144377
+-0.681709 0.63849 0.316598 0.165438
+-0.722008 0.592537 0.3051 0.18579
+-0.759216 0.544047 0.292296 0.205347
+-0.793173 0.493227 0.278239 0.224025
+-0.823733 0.440295 0.262992 0.241743
+-0.850766 0.385477 0.246618 0.258425
+-0.874156 0.329009 0.229188 0.274002
+-0.893803 0.271132 0.210777 0.288405
+-0.909622 0.212094 0.191463 0.301573
+-0.921546 0.152148 0.171329 0.313449
+-0.929524 0.0915501 0.150462 0.323984
+-0.933521 0.0305604 0.12895 0.333131
+0.951462 0.0311476 -0.0300115 -0.304712
+0.947388 0.0933095 -0.0100181 -0.306022
+0.939256 0.155072 0.0100181 -0.306022
+0.927103 0.21617 0.0300115 -0.304712
+0.91098 0.276343 0.0498763 -0.302097
+0.890956 0.335332 0.0695276 -0.298188
+0.867117 0.392885 0.0888812 -0.293002
+0.839564 0.448756 0.107854 -0.286561
+0.808416 0.502706 0.126365 -0.278894
+0.773807 0.554502 0.144335 -0.270032
+0.735884 0.603924 0.161687 -0.260014
+0.69481 0.650761 0.178347 -0.248882
+0.65076 0.69481 0.194242 -0.236685
+0.603924 0.735884 0.209307 -0.223474
+0.554502 0.773807 0.223474 -0.209307
+0.502706 0.808416 0.236685 -0.194242
+0.448756 0.839564 0.248882 -0.178347
+0.392885 0.867117 0.260014 -0.161687
+0.335332 0.890956 0.270032 -0.144335
+0.276343 0.91098 0.278894 -0.126365
+0.21617 0.927103 0.286561 -0.107854
+0.155072 0.939256 0.293002 -0.0888811
+0.0933094 0.947388 0.298188 -0.0695276
+0.0311475 0.951462 0.302097 -0.0498763
+-0.0311477 0.951462 0.304712 -0.0300115
+-0.0933096 0.947388 0.306022 -0.0100181
+-0.155072 0.939256 0.306022 0.0100182
+-0.21617 0.927103 0.304712 0.0300115
+-0.276343 0.91098 0.302097 0.0498764
+-0.335332 0.890956 0.298188 0.0695277
+-0.392886 0.867116 0.293002 0.0888812
+-0.448756 0.839564 0.286562 0.107854
+-0.502706 0.808416 0.278894 0.126365
+-0.554502 0.773807 0.270032 0.144335
+-0.603924 0.735884 0.260014 0.161687
+-0.650761 0.69481 0.248882 0.178347
+-0.69481 0.650761 0.236685 0.194242
+-0.735884 0.603924 0.223474 0.209307
+-0.773807 0.554502 0.209307 0.223474
+-0.808416 0.502706 0.194242 0.236685
+-0.839564 0.448756 0.178347 0.248882
+-0.867117 0.392885 0.161687 0.260014
+-0.890956 0.335332 0.144335 0.270032
+-0.91098 0.276343 0.126365 0.278894
+-0.927103 0.21617 0.107854 0.286562
+-0.939256 0.155072 0.0888812 0.293002
+-0.947388 0.0933095 0.0695276 0.298188
+-0.951462 0.0311477 0.0498764 0.302097
+0.951462 0.0311476 -0.107854 -0.286561
+0.947388 0.0933095 -0.0888812 -0.293002
+0.939256 0.155072 -0.0695276 -0.298188
+0.927103 0.21617 -0.0498763 -0.302097
+0.91098 0.276343 -0.0300115 -0.304712
+0.890956 0.335332 -0.0100181 -0.306022
+0.867117 0.392885 0.0100181 -0.306022
+0.839564 0.448756 0.0300115 -0.304712
+0.808416 0.502706 0.0498764 -0.302097
+0.773807 0.554502 0.0695276 -0.298188
+0.735884 0.603924 0.0888812 -0.293002
+0.69481 0.650761 0.107854 -0.286561
+0.65076 0.69481 0.126365 -0.278894
+0.603924 0.735884 0.144335 -0.270032
+0.554502 0.773807 0.161687 -0.260014
+0.502706 0.808416 0.178347 -0.248882
+0.448756 0.839564 0.194242 -0.236685
+0.392885 0.867117 0.209307 -0.223474
+0.335332 0.890956 0.223474 -0.209307
+0.276343 0.91098 0.236685 -0.194242
+0.21617 0.927103 0.248882 -0.178347
+0.155072 0.939256 0.260014 -0.161687
+0.0933094 0.947388 0.270032 -0.144335
+0.0311475 0.951462 0.278894 -0.126365
+-0.0311477 0.951462 0.286562 -0.107854
+-0.0933096 0.947388 0.293002 -0.0888811
+-0.155072 0.939256 0.298188 -0.0695276
+-0.21617 0.927103 0.302097 -0.0498763
+-0.276343 0.91098 0.304712 -0.0300114
+-0.335332 0.890956 0.306022 -0.0100181
+-0.392886 0.867116 0.306022 0.0100182
+-0.448756 0.839564 0.304712 0.0300115
+-0.502706 0.808416 0.302097 0.0498763
+-0.554502 0.773807 0.298188 0.0695277
+-0.603924 0.735884 0.293002 0.0888812
+-0.650761 0.69481 0.286561 0.107854
+-0.69481 0.650761 0.278894 0.126365
+-0.735884 0.603924 0.270032 0.144335
+-0.773807 0.554502 0.260014 0.161687
+-0.808416 0.502706 0.248882 0.178347
+-0.839564 0.448756 0.236685 0.194242
+-0.867117 0.392885 0.223474 0.209307
+-0.890956 0.335332 0.209307 0.223474
+-0.91098 0.276343 0.194242 0.236685
+-0.927103 0.21617 0.178347 0.248882
+-0.939256 0.155072 0.161687 0.260014
+-0.947388 0.0933095 0.144335 0.270032
+-0.951462 0.0311477 0.126365 0.278894
+0.966382 0.0316361 -0.031648 -0.253185
+0.962244 0.0947728 -0.0150212 -0.254713
+0.953986 0.157504 0.00166997 -0.25515
+0.941642 0.21956 0.018354 -0.254494
+0.925266 0.280676 0.0349594 -0.252749
+0.904928 0.340591 0.0514151 -0.249921
+0.880714 0.399046 0.0676507 -0.246023
+0.85273 0.455794 0.0835965 -0.241072
+0.821094 0.510589 0.0991844 -0.235089
+0.785942 0.563198 0.114348 -0.228098
+0.747424 0.613395 0.129021 -0.220131
+0.705706 0.660965 0.143142 -0.211221
+0.660965 0.705706 0.15665 -0.201407
+0.613395 0.747424 0.169487 -0.190731
+0.563198 0.785942 0.181599 -0.179237
+0.510589 0.821094 0.192933 -0.166976
+0.455793 0.85273 0.203441 -0.154
+0.399046 0.880714 0.213077 -0.140365
+0.340591 0.904928 0.221801 -0.126129
+0.280676 0.925266 0.229575 -0.111352
+0.21956 0.941642 0.236367 -0.0960987
+0.157504 0.953986 0.242146 -0.0804338
+0.0947727 0.962244 0.246888 -0.0644245
+0.031636 0.966382 0.250573 -0.0481393
+-0.0316362 0.966382 0.253185 -0.031648
+-0.0947729 0.962244 0.254713 -0.0150212
+-0.157504 0.953986 0.25515 0.00166999
+-0.21956 0.941642 0.254494 0.018354
+-0.280676 0.925266 0.252749 0.0349595
+-0.340591 0.904927 0.249921 0.0514152
+-0.399047 0.880714 0.246023 0.0676507
+-0.455793 0.85273 0.241072 0.0835965
+-0.510589 0.821094 0.235089 0.0991844
+-0.563198 0.785941 0.228098 0.114348
+-0.613395 0.747424 0.220131 0.129021
+-0.660966 0.705706 0.211221 0.143142
+-0.705706 0.660966 0.201407 0.15665
+-0.747424 0.613395 0.190731 0.169487
+-0.785942 0.563198 0.179237 0.181599
+-0.821094 0.510589 0.166976 0.192933
+-0.85273 0.455794 0.154 0.20344
+-0.880714 0.399046 0.140365 0.213077
+-0.904928 0.340591 0.126129 0.221801
+-0.925266 0.280676 0.111352 0.229575
+-0.941642 0.21956 0.0960987 0.236367
+-0.953986 0.157504 0.0804339 0.242146
+-0.962244 0.0947727 0.0644245 0.246888
+-0.966382 0.0316362 0.0481394 0.250573
+0.841175 0.0275372 -0.491923 -0.222887
+0.837573 0.0824937 -0.476292 -0.254583
+0.830384 0.137097 -0.458622 -0.285189
+0.81964 0.191113 -0.438987 -0.314574
+0.805385 0.244311 -0.417473 -0.342612
+0.787682 0.296463 -0.394172 -0.369182
+0.766606 0.347345 -0.369182 -0.394172
+0.742247 0.396739 -0.342612 -0.417473
+0.71471 0.444435 -0.314574 -0.438987
+0.684112 0.490228 -0.285189 -0.458622
+0.650585 0.533921 -0.254583 -0.476292
+0.614272 0.575329 -0.222887 -0.491923
+0.575329 0.614272 -0.190237 -0.505447
+0.533922 0.650585 -0.156772 -0.516807
+0.490228 0.684112 -0.122635 -0.525954
+0.444435 0.71471 -0.0879736 -0.532848
+0.396739 0.742247 -0.0529353 -0.537461
+0.347345 0.766606 -0.0176703 -0.539773
+0.296463 0.787682 0.0176704 -0.539773
+0.244311 0.805385 0.0529353 -0.537461
+0.191113 0.81964 0.0879736 -0.532848
+0.137097 0.830384 0.122635 -0.525954
+0.0824936 0.837573 0.156772 -0.516807
+0.0275371 0.841175 0.190237 -0.505447
+-0.0275373 0.841175 0.222887 -0.491923
+-0.0824938 0.837573 0.254583 -0.476292
+-0.137097 0.830384 0.285189 -0.458622
+-0.191113 0.81964 0.314574 -0.438987
+-0.244311 0.805385 0.342612 -0.417473
+-0.296463 0.787682 0.369182 -0.394172
+-0.347345 0.766606 0.394172 -0.369182
+-0.396739 0.742247 0.417473 -0.342612
+-0.444435 0.71471 0.438987 -0.314574
+-0.490228 0.684112 0.458622 -0.285189
+-0.533921 0.650585 0.476292 -0.254583
+-0.575329 0.614272 0.491923 -0.222887
+-0.614272 0.575329 0.505447 -0.190237
+-0.650585 0.533921 0.516807 -0.156772
+-0.684112 0.490228 0.525954 -0.122635
+-0.71471 0.444435 0.532848 -0.0879736
+-0.742247 0.39674 0.537461 -0.0529354
+-0.766606 0.347345 0.539773 -0.0176703
+-0.787682 0.296463 0.539773 0.0176704
+-0.805385 0.244311 0.537461 0.0529353
+-0.81964 0.191113 0.532848 0.0879736
+-0.830384 0.137097 0.525954 0.122635
+-0.837573 0.0824937 0.516807 0.156772
+-0.841175 0.0275373 0.505447 0.190237
+0.865562 0.0283356 -0.433013 -0.25
+0.861855 0.0848853 -0.415735 -0.277785
+0.854458 0.141072 -0.396677 -0.304381
+0.843402 0.196654 -0.37592 -0.329673
+0.828735 0.251394 -0.353553 -0.353553
+0.810518 0.305057 -0.329673 -0.37592
+0.788831 0.357415 -0.304381 -0.396677
+0.763766 0.408242 -0.277785 -0.415735
+0.735431 0.45732 -0.25 -0.433013
+0.703946 0.50444 -0.221144 -0.448436
+0.669447 0.549401 -0.191342 -0.46194
+0.632081 0.592008 -0.16072 -0.473465
+0.592008 0.632081 -0.12941 -0.482963
+0.549401 0.669447 -0.0975452 -0.490393
+0.50444 0.703946 -0.0652631 -0.495722
+0.45732 0.735431 -0.0327015 -0.498929
+0.408241 0.763766 4.11028e-08 -0.5
+0.357415 0.788831 0.0327016 -0.498929
+0.305057 0.810518 0.0652631 -0.495722
+0.251394 0.828735 0.0975452 -0.490393
+0.196654 0.843402 0.12941 -0.482963
+0.141072 0.854458 0.16072 -0.473465
+0.0848852 0.861855 0.191342 -0.46194
+0.0283355 0.865562 0.221144 -0.448436
+-0.0283356 0.865562 0.25 -0.433013
+-0.0848854 0.861855 0.277785 -0.415735
+-0.141072 0.854458 0.304381 -0.396677
+-0.196654 0.843402 0.329673 -0.37592
+-0.251394 0.828735 0.353553 -0.353553
+-0.305058 0.810518 0.37592 -0.329673
+-0.357415 0.788831 0.396677 -0.304381
+-0.408241 0.763766 0.415735 -0.277785
+-0.45732 0.735431 0.433013 -0.25
+-0.504441 0.703946 0.448436 -0.221144
+-0.549401 0.669447 0.46194 -0.191342
+-0.592008 0.632081 0.473465 -0.16072
+-0.632081 0.592008 0.482963 -0.12941
+-0.669447 0.549401 0.490393 -0.0975451
+-0.703946 0.504441 0.495722 -0.0652631
+-0.735431 0.45732 0.498929 -0.0327016
+-0.763766 0.408242 0.5 -5.62508e-08
+-0.788831 0.357415 0.498929 0.0327016
+-0.810518 0.305057 0.495722 0.0652631
+-0.828735 0.251394 0.490393 0.0975451
+-0.843402 0.196654 0.482963 0.12941
+-0.854458 0.141072 0.473465 0.16072
+-0.861855 0.0848853 0.46194 0.191342
+-0.865562 0.0283356 0.448436 0.221144
+0.865562 0.0283356 -0.473465 -0.16072
+0.861855 0.0848853 -0.46194 -0.191342
+0.854458 0.141072 -0.448436 -0.221144
+0.843402 0.196654 -0.433013 -0.25
+0.828735 0.251394 -0.415735 -0.277785
+0.810518 0.305057 -0.396677 -0.304381
+0.788831 0.357415 -0.37592 -0.329673
+0.763766 0.408242 -0.353553 -0.353553
+0.735431 0.45732 -0.329673 -0.37592
+0.703946 0.50444 -0.304381 -0.396677
+0.669447 0.549401 -0.277785 -0.415735
+0.632081 0.592008 -0.25 -0.433013
+0.592008 0.632081 -0.221144 -0.448436
+0.549401 0.669447 -0.191342 -0.46194
+0.50444 0.703946 -0.16072 -0.473465
+0.45732 0.735431 -0.129409 -0.482963
+0.408241 0.763766 -0.0975451 -0.490393
+0.357415 0.788831 -0.0652631 -0.495722
+0.305057 0.810518 -0.0327015 -0.498929
+0.251394 0.828735 2.1483e-08 -0.5
+0.196654 0.843402 0.0327016 -0.498929
+0.141072 0.854458 0.0652632 -0.495722
+0.0848852 0.861855 0.0975452 -0.490393
+0.0283355 0.865562 0.12941 -0.482963
+-0.0283356 0.865562 0.16072 -0.473465
+-0.0848854 0.861855 0.191342 -0.46194
+-0.141072 0.854458 0.221144 -0.448436
+-0.196654 0.843402 0.25 -0.433013
+-0.251394 0.828735 0.277785 -0.415735
+-0.305058 0.810518 0.304381 -0.396677
+-0.357415 0.788831 0.329673 -0.37592
+-0.408241 0.763766 0.353553 -0.353553
+-0.45732 0.735431 0.37592 -0.329673
+-0.504441 0.703946 0.396677 -0.304381
+-0.549401 0.669447 0.415735 -0.277785
+-0.592008 0.632081 0.433013 -0.25
+-0.632081 0.592008 0.448436 -0.221144
+-0.669447 0.549401 0.46194 -0.191342
+-0.703946 0.504441 0.473465 -0.16072
+-0.735431 0.45732 0.482963 -0.12941
+-0.763766 0.408242 0.490393 -0.0975452
+-0.788831 0.357415 0.495722 -0.0652631
+-0.810518 0.305057 0.498929 -0.0327015
+-0.828735 0.251394 0.5 -1.62659e-08
+-0.843402 0.196654 0.498929 0.0327016
+-0.854458 0.141072 0.495722 0.0652631
+-0.861855 0.0848853 0.490393 0.0975452
+-0.865562 0.0283356 0.482963 0.129409
+0.88928 0.029112 -0.415751 -0.188374
+0.885472 0.0872114 -0.40254 -0.215162
+0.877872 0.144937 -0.387606 -0.241029
+0.866513 0.202043 -0.371012 -0.265863
+0.851444 0.258283 -0.352829 -0.28956
+0.832728 0.313417 -0.333136 -0.312016
+0.810447 0.367209 -0.312016 -0.333136
+0.784695 0.419428 -0.28956 -0.352829
+0.755583 0.469852 -0.265863 -0.371012
+0.723236 0.518263 -0.241029 -0.387606
+0.687791 0.564456 -0.215162 -0.40254
+0.649401 0.608231 -0.188374 -0.415751
+0.608231 0.649401 -0.160779 -0.427181
+0.564456 0.687791 -0.132496 -0.436782
+0.518263 0.723236 -0.103646 -0.444512
+0.469852 0.755583 -0.0743512 -0.450339
+0.419428 0.784695 -0.0447385 -0.454238
+0.367209 0.810447 -0.0149341 -0.456191
+0.313417 0.832728 0.0149342 -0.456191
+0.258283 0.851444 0.0447385 -0.454238
+0.202043 0.866513 0.0743513 -0.450339
+0.144937 0.877872 0.103646 -0.444512
+0.0872113 0.885472 0.132496 -0.436781
+0.0291119 0.88928 0.160779 -0.427181
+-0.0291121 0.88928 0.188374 -0.415751
+-0.0872115 0.885472 0.215162 -0.40254
+-0.144937 0.877872 0.241029 -0.387606
+-0.202043 0.866513 0.265863 -0.371012
+-0.258283 0.851444 0.28956 -0.352829
+-0.313417 0.832728 0.312016 -0.333136
+-0.367209 0.810447 0.333136 -0.312016
+-0.419428 0.784695 0.352829 -0.28956
+-0.469852 0.755583 0.371012 -0.265863
+-0.518263 0.723236 0.387606 -0.241029
+-0.564456 0.687791 0.40254 -0.215162
+-0.608231 0.649401 0.415751 -0.188374
+-0.649401 0.608231 0.427181 -0.160779
+-0.687791 0.564456 0.436782 -0.132496
+-0.723236 0.518263 0.444512 -0.103646
+-0.755583 0.469852 0.450339 -0.0743513
+-0.784695 0.419428 0.454238 -0.0447386
+-0.810447 0.367209 0.456191 -0.0149342
+-0.832728 0.313417 0.456191 0.0149342
+-0.851444 0.258283 0.454238 0.0447385
+-0.866513 0.202043 0.450339 0.0743513
+-0.877872 0.144937 0.444512 0.103646
+-0.885472 0.0872113 0.436782 0.132496
+-0.88928 0.0291121 0.427181 0.160779
+0.88928 0.029112 -0.371012 -0.265863
+0.885472 0.0872114 -0.352829 -0.28956
+0.877872 0.144937 -0.333136 -0.312016
+0.866513 0.202043 -0.312016 -0.333136
+0.851444 0.258283 -0.28956 -0.352829
+0.832728 0.313417 -0.265863 -0.371012
+0.810447 0.367209 -0.241029 -0.387606
+0.784695 0.419428 -0.215162 -0.40254
+0.755583 0.469852 -0.188374 -0.415751
+0.723236 0.518263 -0.160779 -0.427181
+0.687791 0.564456 -0.132496 -0.436782
+0.649401 0.608231 -0.103646 -0.444512
+0.608231 0.649401 -0.0743513 -0.450339
+0.564456 0.687791 -0.0447385 -0.454238
+0.518263 0.723236 -0.0149341 -0.456191
+0.469852 0.755583 0.0149342 -0.456191
+0.419428 0.784695 0.0447385 -0.454238
+0.367209 0.810447 0.0743513 -0.450339
+0.313417 0.832728 0.103646 -0.444512
+0.258283 0.851444 0.132496 -0.436782
+0.202043 0.866513 0.160779 -0.427181
+0.144937 0.877872 0.188374 -0.415751
+0.0872113 0.885472 0.215162 -0.40254
+0.0291119 0.88928 0.241029 -0.387606
+-0.0291121 0.88928 0.265864 -0.371012
+-0.0872115 0.885472 0.28956 -0.352829
+-0.144937 0.877872 0.312016 -0.333136
+-0.202043 0.866513 0.333136 -0.312016
+-0.258283 0.851444 0.352829 -0.28956
+-0.313417 0.832728 0.371012 -0.265863
+-0.367209 0.810447 0.387606 -0.241029
+-0.419428 0.784695 0.40254 -0.215162
+-0.469852 0.755583 0.415751 -0.188374
+-0.518263 0.723236 0.427181 -0.160779
+-0.564456 0.687791 0.436782 -0.132496
+-0.608231 0.649401 0.444512 -0.103646
+-0.649401 0.608231 0.450339 -0.0743513
+-0.687791 0.564456 0.454238 -0.0447385
+-0.723236 0.518263 0.456191 -0.0149342
+-0.755583 0.469852 0.456191 0.0149342
+-0.784695 0.419428 0.454238 0.0447384
+-0.810447 0.367209 0.450339 0.0743513
+-0.832728 0.313417 0.444512 0.103646
+-0.851444 0.258283 0.436782 0.132496
+-0.866513 0.202043 0.427181 0.160779
+-0.877872 0.144937 0.415751 0.188374
+-0.885472 0.0872113 0.40254 0.215162
+-0.88928 0.0291121 0.387606 0.241029
+0.912382 0.0298683 -0.306937 -0.269177
+0.908475 0.089477 -0.288675 -0.288675
+0.900678 0.148703 -0.269177 -0.306937
+0.889024 0.207291 -0.248526 -0.323885
+0.873563 0.264992 -0.226811 -0.339446
+0.854361 0.321559 -0.204124 -0.353553
+0.831501 0.376748 -0.180564 -0.366147
+0.80508 0.430324 -0.15623 -0.377172
+0.775212 0.482058 -0.131227 -0.386583
+0.742024 0.531727 -0.105662 -0.394338
+0.705659 0.579119 -0.0796453 -0.400404
+0.666272 0.624032 -0.0532871 -0.404756
+0.624032 0.666272 -0.0267007 -0.407374
+0.579119 0.705659 9.12808e-10 -0.408248
+0.531727 0.742024 0.0267007 -0.407374
+0.482058 0.775212 0.0532871 -0.404756
+0.430324 0.80508 0.0796453 -0.400404
+0.376748 0.831501 0.105662 -0.394338
+0.321559 0.854361 0.131227 -0.386583
+0.264992 0.873563 0.15623 -0.377172
+0.207291 0.889024 0.180564 -0.366147
+0.148702 0.900678 0.204124 -0.353553
+0.0894769 0.908475 0.226811 -0.339446
+0.0298682 0.912382 0.248526 -0.323885
+-0.0298684 0.912382 0.269177 -0.306937
+-0.0894771 0.908475 0.288675 -0.288675
+-0.148703 0.900678 0.306937 -0.269177
+-0.207291 0.889024 0.323885 -0.248526
+-0.264993 0.873563 0.339446 -0.226811
+-0.321559 0.854361 0.353553 -0.204124
+-0.376748 0.831501 0.366147 -0.180564
+-0.430324 0.80508 0.377172 -0.15623
+-0.482058 0.775212 0.386583 -0.131227
+-0.531727 0.742024 0.394338 -0.105662
+-0.579119 0.705659 0.400404 -0.0796453
+-0.624032 0.666272 0.404756 -0.0532871
+-0.666272 0.624032 0.407374 -0.0267007
+-0.705659 0.579119 0.408248 1.87579e-08
+-0.742024 0.531727 0.407374 0.0267007
+-0.775212 0.482058 0.404756 0.0532871
+-0.80508 0.430324 0.400404 0.0796452
+-0.831501 0.376748 0.394338 0.105662
+-0.854361 0.321559 0.386583 0.131227
+-0.873563 0.264992 0.377172 0.15623
+-0.889024 0.207291 0.366147 0.180564
+-0.900678 0.148703 0.353553 0.204124
+-0.908475 0.089477 0.339446 0.226811
+-0.912382 0.0298684 0.323885 0.248526
+0.912382 0.0298683 -0.353553 -0.204124
+0.908475 0.089477 -0.339446 -0.226811
+0.900678 0.148703 -0.323885 -0.248526
+0.889024 0.207291 -0.306937 -0.269177
+0.873563 0.264992 -0.288675 -0.288675
+0.854361 0.321559 -0.269177 -0.306937
+0.831501 0.376748 -0.248526 -0.323885
+0.80508 0.430324 -0.226811 -0.339446
+0.775212 0.482058 -0.204124 -0.353553
+0.742024 0.531727 -0.180564 -0.366147
+0.705659 0.579119 -0.15623 -0.377172
+0.666272 0.624032 -0.131227 -0.386583
+0.624032 0.666272 -0.105662 -0.394338
+0.579119 0.705659 -0.0796453 -0.400404
+0.531727 0.742024 -0.0532871 -0.404756
+0.482058 0.775212 -0.0267007 -0.407374
+0.430324 0.80508 3.35603e-08 -0.408248
+0.376748 0.831501 0.0267007 -0.407374
+0.321559 0.854361 0.0532871 -0.404756
+0.264992 0.873563 0.0796453 -0.400404
+0.207291 0.889024 0.105662 -0.394338
+0.148702 0.900678 0.131227 -0.386583
+0.0894769 0.908475 0.15623 -0.377172
+0.0298682 0.912382 0.180564 -0.366147
+-0.0298684 0.912382 0.204124 -0.353553
+-0.0894771 0.908475 0.226811 -0.339446
+-0.148703 0.900678 0.248526 -0.323885
+-0.207291 0.889024 0.269177 -0.306937
+-0.264993 0.873563 0.288675 -0.288675
+-0.321559 0.854361 0.306937 -0.269177
+-0.376748 0.831501 0.323885 -0.248526
+-0.430324 0.80508 0.339446 -0.226811
+-0.482058 0.775212 0.353553 -0.204124
+-0.531727 0.742024 0.366147 -0.180564
+-0.579119 0.705659 0.377172 -0.15623
+-0.624032 0.666272 0.386583 -0.131227
+-0.666272 0.624032 0.394338 -0.105662
+-0.705659 0.579119 0.400404 -0.0796453
+-0.742024 0.531727 0.404756 -0.0532871
+-0.775212 0.482058 0.407374 -0.0267007
+-0.80508 0.430324 0.408248 -4.59286e-08
+-0.831501 0.376748 0.407374 0.0267007
+-0.854361 0.321559 0.404756 0.0532871
+-0.873563 0.264992 0.400404 0.0796453
+-0.889024 0.207291 0.394338 0.105662
+-0.900678 0.148703 0.386583 0.131227
+-0.908475 0.089477 0.377172 0.15623
+-0.912382 0.0298684 0.366147 0.180564
+0.933521 0.0305603 -0.296084 -0.199846
+0.929524 0.0915501 -0.28238 -0.218783
+0.921546 0.152148 -0.267466 -0.236783
+0.909622 0.212094 -0.251407 -0.253769
+0.893803 0.271132 -0.234272 -0.269668
+0.874156 0.329009 -0.216133 -0.284413
+0.850766 0.385477 -0.197069 -0.29794
+0.823733 0.440295 -0.17716 -0.310191
+0.793173 0.493227 -0.156494 -0.321114
+0.759216 0.544047 -0.135157 -0.330661
+0.722008 0.592537 -0.113241 -0.338793
+0.681709 0.63849 -0.0908405 -0.345474
+0.63849 0.681709 -0.068051 -0.350675
+0.592537 0.722008 -0.04497 -0.354375
+0.544047 0.759216 -0.0216964 -0.356558
+0.493227 0.793173 0.00167001 -0.357213
+0.440295 0.823733 0.0250293 -0.356339
+0.385477 0.850766 0.0482814 -0.353939
+0.329009 0.874156 0.0713268 -0.350024
+0.271132 0.893803 0.0940667 -0.344609
+0.212094 0.909622 0.116404 -0.337719
+0.152148 0.921546 0.138243 -0.329383
+0.09155 0.929524 0.159489 -0.319636
+0.0305602 0.933521 0.180053 -0.308521
+-0.0305604 0.933521 0.199846 -0.296084
+-0.0915502 0.929524 0.218783 -0.28238
+-0.152148 0.921546 0.236783 -0.267466
+-0.212094 0.909622 0.253769 -0.251407
+-0.271132 0.893803 0.269668 -0.234272
+-0.329009 0.874156 0.284413 -0.216133
+-0.385477 0.850766 0.29794 -0.197069
+-0.440295 0.823733 0.310191 -0.17716
+-0.493227 0.793173 0.321114 -0.156494
+-0.544047 0.759216 0.330661 -0.135157
+-0.592537 0.722008 0.338793 -0.113241
+-0.63849 0.681709 0.345474 -0.0908405
+-0.681709 0.63849 0.350675 -0.068051
+-0.722008 0.592537 0.354375 -0.04497
+-0.759216 0.544047 0.356558 -0.0216965
+-0.793173 0.493227 0.357213 0.00166999
+-0.823733 0.440295 0.356339 0.0250292
+-0.850766 0.385477 0.353939 0.0482814
+-0.874156 0.329009 0.350024 0.0713268
+-0.893803 0.271132 0.344609 0.0940667
+-0.909622 0.212094 0.337719 0.116404
+-0.921546 0.152148 0.329383 0.138242
+-0.929524 0.0915501 0.319636 0.159489
+-0.933521 0.0305604 0.308521 0.180053
+0.88928 0.029112 -0.444512 -0.103646
+0.885472 0.0872114 -0.436782 -0.132496
+0.877872 0.144937 -0.427181 -0.160779
+0.866513 0.202043 -0.415751 -0.188374
+0.851444 0.258283 -0.40254 -0.215162
+0.832728 0.313417 -0.387606 -0.241029
+0.810447 0.367209 -0.371012 -0.265863
+0.784695 0.419428 -0.352829 -0.28956
+0.755583 0.469852 -0.333136 -0.312016
+0.723236 0.518263 -0.312016 -0.333136
+0.687791 0.564456 -0.28956 -0.352829
+0.649401 0.608231 -0.265863 -0.371012
+0.608231 0.649401 -0.241029 -0.387606
+0.564456 0.687791 -0.215162 -0.40254
+0.518263 0.723236 -0.188374 -0.415751
+0.469852 0.755583 -0.160779 -0.427181
+0.419428 0.784695 -0.132496 -0.436782
+0.367209 0.810447 -0.103646 -0.444512
+0.313417 0.832728 -0.0743512 -0.450339
+0.258283 0.851444 -0.0447385 -0.454238
+0.202043 0.866513 -0.0149341 -0.456191
+0.144937 0.877872 0.0149342 -0.456191
+0.0872113 0.885472 0.0447386 -0.454238
+0.0291119 0.88928 0.0743513 -0.450339
+-0.0291121 0.88928 0.103646 -0.444512
+-0.0872115 0.885472 0.132496 -0.436781
+-0.144937 0.877872 0.160779 -0.427181
+-0.202043 0.866513 0.188374 -0.415751
+-0.258283 0.851444 0.215162 -0.40254
+-0.313417 0.832728 0.241029 -0.387606
+-0.367209 0.810447 0.265864 -0.371012
+-0.419428 0.784695 0.28956 -0.352829
+-0.469852 0.755583 0.312016 -0.333136
+-0.518263 0.723236 0.333136 -0.312016
+-0.564456 0.687791 0.352829 -0.28956
+-0.608231 0.649401 0.371012 -0.265863
+-0.649401 0.608231 0.387606 -0.241029
+-0.687791 0.564456 0.40254 -0.215162
+-0.723236 0.518263 0.415751 -0.188374
+-0.755583 0.469852 0.427181 -0.160779
+-0.784695 0.419428 0.436781 -0.132496
+-0.810447 0.367209 0.444512 -0.103646
+-0.832728 0.313417 0.450339 -0.0743512
+-0.851444 0.258283 0.454238 -0.0447385
+-0.866513 0.202043 0.456191 -0.0149341
+-0.877872 0.144937 0.456191 0.0149341
+-0.885472 0.0872113 0.454238 0.0447385
+-0.88928 0.0291121 0.450339 0.0743512
+0.912382 0.0298683 -0.386583 -0.131227
+0.908475 0.089477 -0.377172 -0.15623
+0.900678 0.148703 -0.366147 -0.180564
+0.889024 0.207291 -0.353553 -0.204124
+0.873563 0.264992 -0.339446 -0.226811
+0.854361 0.321559 -0.323885 -0.248526
+0.831501 0.376748 -0.306937 -0.269177
+0.80508 0.430324 -0.288675 -0.288675
+0.775212 0.482058 -0.269177 -0.306937
+0.742024 0.531727 -0.248526 -0.323885
+0.705659 0.579119 -0.226811 -0.339446
+0.666272 0.624032 -0.204124 -0.353553
+0.624032 0.666272 -0.180564 -0.366147
+0.579119 0.705659 -0.15623 -0.377172
+0.531727 0.742024 -0.131227 -0.386583
+0.482058 0.775212 -0.105662 -0.394338
+0.430324 0.80508 -0.0796453 -0.400404
+0.376748 0.831501 -0.0532871 -0.404756
+0.321559 0.854361 -0.0267007 -0.407374
+0.264992 0.873563 1.75408e-08 -0.408248
+0.207291 0.889024 0.0267007 -0.407374
+0.148702 0.900678 0.0532871 -0.404756
+0.0894769 0.908475 0.0796453 -0.400404
+0.0298682 0.912382 0.105662 -0.394338
+-0.0298684 0.912382 0.131227 -0.386583
+-0.0894771 0.908475 0.15623 -0.377172
+-0.148703 0.900678 0.180564 -0.366147
+-0.207291 0.889024 0.204124 -0.353553
+-0.264993 0.873563 0.226811 -0.339446
+-0.321559 0.854361 0.248526 -0.323885
+-0.376748 0.831501 0.269177 -0.306937
+-0.430324 0.80508 0.288675 -0.288675
+-0.482058 0.775212 0.306937 -0.269177
+-0.531727 0.742024 0.323885 -0.248526
+-0.579119 0.705659 0.339446 -0.226811
+-0.624032 0.666272 0.353553 -0.204124
+-0.666272 0.624032 0.366147 -0.180564
+-0.705659 0.579119 0.377172 -0.15623
+-0.742024 0.531727 0.386583 -0.131227
+-0.775212 0.482058 0.394338 -0.105662
+-0.80508 0.430324 0.400404 -0.0796453
+-0.831501 0.376748 0.404756 -0.0532871
+-0.854361 0.321559 0.407374 -0.0267007
+-0.873563 0.264992 0.408248 -1.32811e-08
+-0.889024 0.207291 0.407374 0.0267007
+-0.900678 0.148703 0.404756 0.0532871
+-0.908475 0.089477 0.400404 0.0796453
+-0.912382 0.0298684 0.394338 0.105662
+0.912382 0.0298683 -0.404756 -0.0532871
+0.908475 0.089477 -0.400404 -0.0796453
+0.900678 0.148703 -0.394338 -0.105662
+0.889024 0.207291 -0.386583 -0.131227
+0.873563 0.264992 -0.377172 -0.15623
+0.854361 0.321559 -0.366147 -0.180564
+0.831501 0.376748 -0.353553 -0.204124
+0.80508 0.430324 -0.339446 -0.226811
+0.775212 0.482058 -0.323885 -0.248526
+0.742024 0.531727 -0.306937 -0.269177
+0.705659 0.579119 -0.288675 -0.288675
+0.666272 0.624032 -0.269177 -0.306937
+0.624032 0.666272 -0.248526 -0.323885
+0.579119 0.705659 -0.226811 -0.339446
+0.531727 0.742024 -0.204124 -0.353553
+0.482058 0.775212 -0.180564 -0.366147
+0.430324 0.80508 -0.15623 -0.377172
+0.376748 0.831501 -0.131227 -0.386583
+0.321559 0.854361 -0.105662 -0.394338
+0.264992 0.873563 -0.0796453 -0.400404
+0.207291 0.889024 -0.0532871 -0.404756
+0.148702 0.900678 -0.0267007 -0.407374
+0.0894769 0.908475 5.01883e-08 -0.408248
+0.0298682 0.912382 0.0267008 -0.407374
+-0.0298684 0.912382 0.0532871 -0.404756
+-0.0894771 0.908475 0.0796453 -0.400404
+-0.148703 0.900678 0.105662 -0.394338
+-0.207291 0.889024 0.131227 -0.386583
+-0.264993 0.873563 0.15623 -0.377172
+-0.321559 0.854361 0.180564 -0.366147
+-0.376748 0.831501 0.204124 -0.353553
+-0.430324 0.80508 0.226811 -0.339446
+-0.482058 0.775212 0.248526 -0.323885
+-0.531727 0.742024 0.269177 -0.306937
+-0.579119 0.705659 0.288675 -0.288675
+-0.624032 0.666272 0.306937 -0.269177
+-0.666272 0.624032 0.323885 -0.248526
+-0.705659 0.579119 0.339446 -0.226811
+-0.742024 0.531727 0.353553 -0.204124
+-0.775212 0.482058 0.366147 -0.180564
+-0.80508 0.430324 0.377172 -0.15623
+-0.831501 0.376748 0.386583 -0.131227
+-0.854361 0.321559 0.394338 -0.105662
+-0.873563 0.264992 0.400404 -0.0796453
+-0.889024 0.207291 0.404756 -0.0532871
+-0.900678 0.148703 0.407374 -0.0267007
+-0.908475 0.089477 0.408248 1.93664e-08
+-0.912382 0.0298684 0.407374 0.0267007
+0.933521 0.0305603 -0.353472 -0.0515886
+0.929524 0.0915501 -0.349342 -0.0745963
+0.921546 0.152148 -0.343715 -0.0972846
+0.909622 0.212094 -0.336616 -0.119556
+0.893803 0.271132 -0.328076 -0.141316
+0.874156 0.329009 -0.318131 -0.162471
+0.850766 0.385477 -0.306824 -0.18293
+0.823733 0.440295 -0.294203 -0.202605
+0.793173 0.493227 -0.280322 -0.221413
+0.759216 0.544047 -0.265241 -0.239273
+0.722008 0.592537 -0.249023 -0.256108
+0.681709 0.63849 -0.23174 -0.271847
+0.63849 0.681709 -0.213464 -0.286421
+0.592537 0.722008 -0.194274 -0.299769
+0.544047 0.759216 -0.174252 -0.311834
+0.493227 0.793173 -0.153484 -0.322563
+0.440295 0.823733 -0.132059 -0.33191
+0.385477 0.850766 -0.110068 -0.339837
+0.329009 0.874156 -0.0876064 -0.346308
+0.271132 0.893803 -0.0647692 -0.351296
+0.212094 0.909622 -0.0416547 -0.35478
+0.152148 0.921546 -0.0183617 -0.356745
+0.09155 0.929524 0.00500984 -0.357182
+0.0305602 0.933521 0.0283599 -0.35609
+-0.0305604 0.933521 0.0515886 -0.353472
+-0.0915502 0.929524 0.0745963 -0.349342
+-0.152148 0.921546 0.0972847 -0.343715
+-0.212094 0.909622 0.119556 -0.336616
+-0.271132 0.893803 0.141316 -0.328076
+-0.329009 0.874156 0.162471 -0.318131
+-0.385477 0.850766 0.18293 -0.306824
+-0.440295 0.823733 0.202605 -0.294203
+-0.493227 0.793173 0.221413 -0.280322
+-0.544047 0.759216 0.239273 -0.265241
+-0.592537 0.722008 0.256108 -0.249023
+-0.63849 0.681709 0.271847 -0.23174
+-0.681709 0.63849 0.286421 -0.213464
+-0.722008 0.592537 0.299769 -0.194274
+-0.759216 0.544047 0.311834 -0.174252
+-0.793173 0.493227 0.322563 -0.153484
+-0.823733 0.440295 0.33191 -0.132059
+-0.850766 0.385477 0.339837 -0.110069
+-0.874156 0.329009 0.346308 -0.0876064
+-0.893803 0.271132 0.351296 -0.0647693
+-0.909622 0.212094 0.35478 -0.0416547
+-0.921546 0.152148 0.356745 -0.0183618
+-0.929524 0.0915501 0.357182 0.00500981
+-0.933521 0.0305604 0.35609 0.0283599
+0.933521 0.0305603 -0.333131 -0.12895
+0.929524 0.0915501 -0.323984 -0.150462
+0.921546 0.152148 -0.313449 -0.171329
+0.909622 0.212094 -0.301573 -0.191463
+0.893803 0.271132 -0.288405 -0.210777
+0.874156 0.329009 -0.274002 -0.229188
+0.850766 0.385477 -0.258425 -0.246618
+0.823733 0.440295 -0.241743 -0.262992
+0.793173 0.493227 -0.224025 -0.278239
+0.759216 0.544047 -0.205347 -0.292296
+0.722008 0.592537 -0.18579 -0.3051
+0.681709 0.63849 -0.165438 -0.316598
+0.63849 0.681709 -0.144377 -0.32674
+0.592537 0.722008 -0.122698 -0.335484
+0.544047 0.759216 -0.100494 -0.34279
+0.493227 0.793173 -0.0778593 -0.348629
+0.440295 0.823733 -0.0548912 -0.352975
+0.385477 0.850766 -0.031688 -0.355809
+0.329009 0.874156 -0.00834915 -0.35712
+0.271132 0.893803 0.0150255 -0.356901
+0.212094 0.909622 0.0383357 -0.355154
+0.152148 0.921546 0.0614819 -0.351887
+0.09155 0.929524 0.0843647 -0.347112
+0.0305602 0.933521 0.106886 -0.340851
+-0.0305604 0.933521 0.12895 -0.333131
+-0.0915502 0.929524 0.150462 -0.323984
+-0.152148 0.921546 0.171329 -0.313449
+-0.212094 0.909622 0.191463 -0.301573
+-0.271132 0.893803 0.210777 -0.288405
+-0.329009 0.874156 0.229188 -0.274002
+-0.385477 0.850766 0.246618 -0.258425
+-0.440295 0.823733 0.262992 -0.241743
+-0.493227 0.793173 0.278239 -0.224025
+-0.544047 0.759216 0.292296 -0.205347
+-0.592537 0.722008 0.3051 -0.18579
+-0.63849 0.681709 0.316598 -0.165438
+-0.681709 0.63849 0.32674 -0.144377
+-0.722008 0.592537 0.335484 -0.122698
+-0.759216 0.544047 0.34279 -0.100494
+-0.793173 0.493227 0.348629 -0.0778593
+-0.823733 0.440295 0.352975 -0.0548913
+-0.850766 0.385477 0.355809 -0.031688
+-0.874156 0.329009 0.35712 -0.00834914
+-0.893803 0.271132 0.356901 0.0150254
+-0.909622 0.212094 0.355154 0.0383358
+-0.921546 0.152148 0.351887 0.0614818
+-0.929524 0.0915501 0.347112 0.0843647
+-0.933521 0.0305604 0.340851 0.106886
+0.951462 0.0311476 -0.278894 -0.126365
+0.947388 0.0933095 -0.270032 -0.144335
+0.939256 0.155072 -0.260014 -0.161687
+0.927103 0.21617 -0.248882 -0.178347
+0.91098 0.276343 -0.236685 -0.194242
+0.890956 0.335332 -0.223474 -0.209307
+0.867117 0.392885 -0.209307 -0.223474
+0.839564 0.448756 -0.194242 -0.236685
+0.808416 0.502706 -0.178347 -0.248882
+0.773807 0.554502 -0.161687 -0.260014
+0.735884 0.603924 -0.144335 -0.270032
+0.69481 0.650761 -0.126365 -0.278894
+0.65076 0.69481 -0.107854 -0.286561
+0.603924 0.735884 -0.0888812 -0.293002
+0.554502 0.773807 -0.0695276 -0.298188
+0.502706 0.808416 -0.0498763 -0.302097
+0.448756 0.839564 -0.0300115 -0.304712
+0.392885 0.867117 -0.0100181 -0.306022
+0.335332 0.890956 0.0100181 -0.306022
+0.276343 0.91098 0.0300115 -0.304712
+0.21617 0.927103 0.0498764 -0.302097
+0.155072 0.939256 0.0695277 -0.298188
+0.0933094 0.947388 0.0888812 -0.293002
+0.0311475 0.951462 0.107854 -0.286561
+-0.0311477 0.951462 0.126365 -0.278894
+-0.0933096 0.947388 0.144335 -0.270032
+-0.155072 0.939256 0.161687 -0.260014
+-0.21617 0.927103 0.178347 -0.248882
+-0.276343 0.91098 0.194243 -0.236685
+-0.335332 0.890956 0.209307 -0.223474
+-0.392886 0.867116 0.223474 -0.209307
+-0.448756 0.839564 0.236685 -0.194243
+-0.502706 0.808416 0.248882 -0.178347
+-0.554502 0.773807 0.260014 -0.161687
+-0.603924 0.735884 0.270032 -0.144335
+-0.650761 0.69481 0.278894 -0.126365
+-0.69481 0.650761 0.286561 -0.107854
+-0.735884 0.603924 0.293002 -0.0888812
+-0.773807 0.554502 0.298188 -0.0695276
+-0.808416 0.502706 0.302097 -0.0498763
+-0.839564 0.448756 0.304712 -0.0300115
+-0.867117 0.392885 0.306022 -0.0100181
+-0.890956 0.335332 0.306022 0.0100182
+-0.91098 0.276343 0.304712 0.0300115
+-0.927103 0.21617 0.302097 0.0498764
+-0.939256 0.155072 0.298188 0.0695276
+-0.947388 0.0933095 0.293002 0.0888812
+-0.951462 0.0311477 0.286562 0.107854
+0.951462 0.0311476 -0.302097 -0.0498763
+0.947388 0.0933095 -0.298188 -0.0695276
+0.939256 0.155072 -0.293002 -0.0888812
+0.927103 0.21617 -0.286561 -0.107854
+0.91098 0.276343 -0.278894 -0.126365
+0.890956 0.335332 -0.270032 -0.144335
+0.867117 0.392885 -0.260014 -0.161687
+0.839564 0.448756 -0.248882 -0.178347
+0.808416 0.502706 -0.236685 -0.194242
+0.773807 0.554502 -0.223474 -0.209307
+0.735884 0.603924 -0.209307 -0.223474
+0.69481 0.650761 -0.194242 -0.236685
+0.65076 0.69481 -0.178347 -0.248882
+0.603924 0.735884 -0.161687 -0.260014
+0.554502 0.773807 -0.144335 -0.270032
+0.502706 0.808416 -0.126365 -0.278894
+0.448756 0.839564 -0.107854 -0.286562
+0.392885 0.867117 -0.0888811 -0.293002
+0.335332 0.890956 -0.0695276 -0.298188
+0.276343 0.91098 -0.0498763 -0.302097
+0.21617 0.927103 -0.0300115 -0.304712
+0.155072 0.939256 -0.0100181 -0.306022
+0.0933094 0.947388 0.0100182 -0.306022
+0.0311475 0.951462 0.0300115 -0.304712
+-0.0311477 0.951462 0.0498764 -0.302097
+-0.0933096 0.947388 0.0695276 -0.298188
+-0.155072 0.939256 0.0888812 -0.293002
+-0.21617 0.927103 0.107854 -0.286561
+-0.276343 0.91098 0.126365 -0.278894
+-0.335332 0.890956 0.144335 -0.270032
+-0.392886 0.867116 0.161687 -0.260014
+-0.448756 0.839564 0.178347 -0.248882
+-0.502706 0.808416 0.194242 -0.236685
+-0.554502 0.773807 0.209307 -0.223474
+-0.603924 0.735884 0.223474 -0.209307
+-0.650761 0.69481 0.236685 -0.194242
+-0.69481 0.650761 0.248882 -0.178347
+-0.735884 0.603924 0.260014 -0.161687
+-0.773807 0.554502 0.270032 -0.144335
+-0.808416 0.502706 0.278894 -0.126365
+-0.839564 0.448756 0.286561 -0.107854
+-0.867117 0.392885 0.293002 -0.0888812
+-0.890956 0.335332 0.298188 -0.0695276
+-0.91098 0.276343 0.302097 -0.0498764
+-0.927103 0.21617 0.304712 -0.0300115
+-0.939256 0.155072 0.306022 -0.0100182
+-0.947388 0.0933095 0.306022 0.0100181
+-0.951462 0.0311477 0.304712 0.0300115
+0.966382 0.0316361 -0.250573 -0.0481394
+0.962244 0.0947728 -0.246888 -0.0644245
+0.953986 0.157504 -0.242146 -0.0804338
+0.941642 0.21956 -0.236367 -0.0960987
+0.925266 0.280676 -0.229575 -0.111352
+0.904928 0.340591 -0.221801 -0.126129
+0.880714 0.399046 -0.213077 -0.140365
+0.85273 0.455794 -0.20344 -0.154
+0.821094 0.510589 -0.192933 -0.166976
+0.785942 0.563198 -0.181599 -0.179237
+0.747424 0.613395 -0.169487 -0.190731
+0.705706 0.660965 -0.15665 -0.201407
+0.660965 0.705706 -0.143142 -0.211221
+0.613395 0.747424 -0.129021 -0.220131
+0.563198 0.785942 -0.114348 -0.228098
+0.510589 0.821094 -0.0991844 -0.235089
+0.455793 0.85273 -0.0835965 -0.241072
+0.399046 0.880714 -0.0676507 -0.246023
+0.340591 0.904928 -0.0514151 -0.249921
+0.280676 0.925266 -0.0349594 -0.252749
+0.21956 0.941642 -0.018354 -0.254494
+0.157504 0.953986 -0.00166994 -0.25515
+0.0947727 0.962244 0.0150212 -0.254713
+0.031636 0.966382 0.0316481 -0.253185
+-0.0316362 0.966382 0.0481394 -0.250573
+-0.0947729 0.962244 0.0644246 -0.246888
+-0.157504 0.953986 0.0804339 -0.242146
+-0.21956 0.941642 0.0960987 -0.236367
+-0.280676 0.925266 0.111352 -0.229575
+-0.340591 0.904927 0.126129 -0.221801
+-0.399047 0.880714 0.140365 -0.213077
+-0.455793 0.85273 0.154 -0.203441
+-0.510589 0.821094 0.166976 -0.192933
+-0.563198 0.785941 0.179237 -0.181599
+-0.613395 0.747424 0.190731 -0.169487
+-0.660966 0.705706 0.201407 -0.15665
+-0.705706 0.660966 0.211221 -0.143142
+-0.747424 0.613395 0.220131 -0.129021
+-0.785942 0.563198 0.228098 -0.114348
+-0.821094 0.510589 0.235089 -0.0991844
+-0.85273 0.455794 0.241072 -0.0835966
+-0.880714 0.399046 0.246023 -0.0676507
+-0.904928 0.340591 0.249921 -0.0514151
+-0.925266 0.280676 0.252749 -0.0349594
+-0.941642 0.21956 0.254494 -0.018354
+-0.953986 0.157504 0.25515 -0.00166999
+-0.962244 0.0947727 0.254713 0.0150212
+-0.966382 0.0316362 0.253185 0.031648
+0.933521 0.0305603 -0.244191 -0.26072
+0.929524 0.0915501 -0.226616 -0.276133
+0.921546 0.152148 -0.208071 -0.290363
+0.909622 0.212094 -0.188635 -0.30335
+0.893803 0.271132 -0.168391 -0.315037
+0.874156 0.329009 -0.147426 -0.325376
+0.850766 0.385477 -0.12583 -0.334322
+0.823733 0.440295 -0.103695 -0.341836
+0.793173 0.493227 -0.0811156 -0.347886
+0.759216 0.544047 -0.0581891 -0.352446
+0.722008 0.592537 -0.0350134 -0.355497
+0.681709 0.63849 -0.0116878 -0.357026
+0.63849 0.681709 0.0116878 -0.357026
+0.592537 0.722008 0.0350134 -0.355497
+0.544047 0.759216 0.0581891 -0.352446
+0.493227 0.793173 0.0811156 -0.347886
+0.440295 0.823733 0.103695 -0.341836
+0.385477 0.850766 0.12583 -0.334322
+0.329009 0.874156 0.147426 -0.325376
+0.271132 0.893803 0.168391 -0.315037
+0.212094 0.909622 0.188635 -0.30335
+0.152148 0.921546 0.208071 -0.290363
+0.09155 0.929524 0.226616 -0.276133
+0.0305602 0.933521 0.244191 -0.26072
+-0.0305604 0.933521 0.26072 -0.244191
+-0.0915502 0.929524 0.276133 -0.226616
+-0.152148 0.921546 0.290363 -0.208071
+-0.212094 0.909622 0.30335 -0.188635
+-0.271132 0.893803 0.315038 -0.168391
+-0.329009 0.874156 0.325376 -0.147426
+-0.385477 0.850766 0.334322 -0.12583
+-0.440295 0.823733 0.341836 -0.103695
+-0.493227 0.793173 0.347886 -0.0811156
+-0.544047 0.759216 0.352446 -0.058189
+-0.592537 0.722008 0.355497 -0.0350134
+-0.63849 0.681709 0.357026 -0.0116878
+-0.681709 0.63849 0.357026 0.0116878
+-0.722008 0.592537 0.355497 0.0350134
+-0.759216 0.544047 0.352446 0.058189
+-0.793173 0.493227 0.347886 0.0811156
+-0.823733 0.440295 0.341836 0.103695
+-0.850766 0.385477 0.334322 0.12583
+-0.874156 0.329009 0.325376 0.147426
+-0.893803 0.271132 0.315037 0.168391
+-0.909622 0.212094 0.30335 0.188635
+-0.921546 0.152148 0.290363 0.208071
+-0.929524 0.0915501 0.276133 0.226616
+-0.933521 0.0305604 0.26072 0.244191
+0.951462 0.0311476 -0.178347 -0.248882
+0.947388 0.0933095 -0.161687 -0.260014
+0.939256 0.155072 -0.144335 -0.270032
+0.927103 0.21617 -0.126365 -0.278894
+0.91098 0.276343 -0.107854 -0.286561
+0.890956 0.335332 -0.0888812 -0.293002
+0.867117 0.392885 -0.0695276 -0.298188
+0.839564 0.448756 -0.0498763 -0.302097
+0.808416 0.502706 -0.0300115 -0.304712
+0.773807 0.554502 -0.0100181 -0.306022
+0.735884 0.603924 0.0100181 -0.306022
+0.69481 0.650761 0.0300115 -0.304712
+0.65076 0.69481 0.0498764 -0.302097
+0.603924 0.735884 0.0695276 -0.298188
+0.554502 0.773807 0.0888812 -0.293002
+0.502706 0.808416 0.107854 -0.286561
+0.448756 0.839564 0.126365 -0.278894
+0.392885 0.867117 0.144335 -0.270032
+0.335332 0.890956 0.161687 -0.260014
+0.276343 0.91098 0.178347 -0.248882
+0.21617 0.927103 0.194242 -0.236685
+0.155072 0.939256 0.209307 -0.223474
+0.0933094 0.947388 0.223474 -0.209307
+0.0311475 0.951462 0.236685 -0.194242
+-0.0311477 0.951462 0.248882 -0.178347
+-0.0933096 0.947388 0.260014 -0.161687
+-0.155072 0.939256 0.270032 -0.144335
+-0.21617 0.927103 0.278894 -0.126365
+-0.276343 0.91098 0.286562 -0.107854
+-0.335332 0.890956 0.293002 -0.0888811
+-0.392886 0.867116 0.298188 -0.0695276
+-0.448756 0.839564 0.302097 -0.0498764
+-0.502706 0.808416 0.304712 -0.0300115
+-0.554502 0.773807 0.306022 -0.0100181
+-0.603924 0.735884 0.306022 0.0100181
+-0.650761 0.69481 0.304712 0.0300115
+-0.69481 0.650761 0.302097 0.0498763
+-0.735884 0.603924 0.298188 0.0695276
+-0.773807 0.554502 0.293002 0.0888811
+-0.808416 0.502706 0.286561 0.107854
+-0.839564 0.448756 0.278894 0.126365
+-0.867117 0.392885 0.270032 0.144335
+-0.890956 0.335332 0.260014 0.161687
+-0.91098 0.276343 0.248882 0.178347
+-0.927103 0.21617 0.236685 0.194242
+-0.939256 0.155072 0.223474 0.209307
+-0.947388 0.0933095 0.209307 0.223474
+-0.951462 0.0311477 0.194243 0.236685
+0.951462 0.0311476 -0.236685 -0.194242
+0.947388 0.0933095 -0.223474 -0.209307
+0.939256 0.155072 -0.209307 -0.223474
+0.927103 0.21617 -0.194242 -0.236685
+0.91098 0.276343 -0.178347 -0.248882
+0.890956 0.335332 -0.161687 -0.260014
+0.867117 0.392885 -0.144335 -0.270032
+0.839564 0.448756 -0.126365 -0.278894
+0.808416 0.502706 -0.107854 -0.286561
+0.773807 0.554502 -0.0888812 -0.293002
+0.735884 0.603924 -0.0695276 -0.298188
+0.69481 0.650761 -0.0498763 -0.302097
+0.65076 0.69481 -0.0300115 -0.304712
+0.603924 0.735884 -0.0100181 -0.306022
+0.554502 0.773807 0.0100181 -0.306022
+0.502706 0.808416 0.0300115 -0.304712
+0.448756 0.839564 0.0498764 -0.302097
+0.392885 0.867117 0.0695276 -0.298188
+0.335332 0.890956 0.0888812 -0.293002
+0.276343 0.91098 0.107854 -0.286561
+0.21617 0.927103 0.126365 -0.278894
+0.155072 0.939256 0.144335 -0.270032
+0.0933094 0.947388 0.161687 -0.260014
+0.0311475 0.951462 0.178347 -0.248882
+-0.0311477 0.951462 0.194243 -0.236685
+-0.0933096 0.947388 0.209307 -0.223474
+-0.155072 0.939256 0.223474 -0.209307
+-0.21617 0.927103 0.236685 -0.194242
+-0.276343 0.91098 0.248882 -0.178347
+-0.335332 0.890956 0.260014 -0.161687
+-0.392886 0.867116 0.270032 -0.144335
+-0.448756 0.839564 0.278894 -0.126365
+-0.502706 0.808416 0.286561 -0.107854
+-0.554502 0.773807 0.293002 -0.0888811
+-0.603924 0.735884 0.298188 -0.0695276
+-0.650761 0.69481 0.302097 -0.0498763
+-0.69481 0.650761 0.304712 -0.0300115
+-0.735884 0.603924 0.306022 -0.0100181
+-0.773807 0.554502 0.306022 0.0100181
+-0.808416 0.502706 0.304712 0.0300115
+-0.839564 0.448756 0.302097 0.0498763
+-0.867117 0.392885 0.298188 0.0695276
+-0.890956 0.335332 0.293002 0.0888812
+-0.91098 0.276343 0.286561 0.107854
+-0.927103 0.21617 0.278894 0.126365
+-0.939256 0.155072 0.270032 0.144335
+-0.947388 0.0933095 0.260014 0.161687
+-0.951462 0.0311477 0.248882 0.178347
+0.966382 0.0316361 -0.174422 -0.186229
+0.962244 0.0947728 -0.161869 -0.197238
+0.953986 0.157504 -0.148622 -0.207402
+0.941642 0.21956 -0.134739 -0.216678
+0.925266 0.280676 -0.120279 -0.225027
+0.904928 0.340591 -0.105304 -0.232412
+0.880714 0.399046 -0.0898784 -0.238801
+0.85273 0.455794 -0.0740676 -0.244168
+0.821094 0.510589 -0.0579397 -0.24849
+0.785942 0.563198 -0.0415636 -0.251747
+0.747424 0.613395 -0.0250096 -0.253927
+0.705706 0.660965 -0.00834844 -0.255019
+0.660965 0.705706 0.00834845 -0.255019
+0.613395 0.747424 0.0250096 -0.253927
+0.563198 0.785942 0.0415636 -0.251747
+0.510589 0.821094 0.0579397 -0.24849
+0.455793 0.85273 0.0740677 -0.244168
+0.399046 0.880714 0.0898784 -0.238801
+0.340591 0.904928 0.105304 -0.232412
+0.280676 0.925266 0.120279 -0.225027
+0.21956 0.941642 0.134739 -0.216678
+0.157504 0.953986 0.148622 -0.207402
+0.0947727 0.962244 0.161869 -0.197238
+0.031636 0.966382 0.174422 -0.186229
+-0.0316362 0.966382 0.186229 -0.174422
+-0.0947729 0.962244 0.197238 -0.161869
+-0.157504 0.953986 0.207402 -0.148622
+-0.21956 0.941642 0.216678 -0.134739
+-0.280676 0.925266 0.225027 -0.120279
+-0.340591 0.904927 0.232412 -0.105304
+-0.399047 0.880714 0.238801 -0.0898784
+-0.455793 0.85273 0.244168 -0.0740677
+-0.510589 0.821094 0.24849 -0.0579397
+-0.563198 0.785941 0.251747 -0.0415636
+-0.613395 0.747424 0.253927 -0.0250096
+-0.660966 0.705706 0.255019 -0.00834843
+-0.705706 0.660966 0.255019 0.00834843
+-0.747424 0.613395 0.253927 0.0250096
+-0.785942 0.563198 0.251747 0.0415636
+-0.821094 0.510589 0.24849 0.0579397
+-0.85273 0.455794 0.244168 0.0740676
+-0.880714 0.399046 0.238801 0.0898784
+-0.904928 0.340591 0.232412 0.105304
+-0.925266 0.280676 0.225027 0.120279
+-0.941642 0.21956 0.216678 0.134739
+-0.953986 0.157504 0.207402 0.148622
+-0.962244 0.0947727 0.197238 0.161869
+-0.966382 0.0316362 0.186229 0.174422
+0.966382 0.0316361 -0.108337 -0.231013
+0.962244 0.0947728 -0.0929965 -0.237604
+0.953986 0.157504 -0.0772574 -0.243178
+0.941642 0.21956 -0.0611873 -0.24771
+0.925266 0.280676 -0.0448553 -0.251182
+0.904928 0.340591 -0.0283312 -0.253577
+0.880714 0.399046 -0.0116858 -0.254887
+0.85273 0.455794 0.00500964 -0.255106
+0.821094 0.510589 0.0216836 -0.254232
+0.785942 0.563198 0.0382648 -0.25227
+0.747424 0.613395 0.0546821 -0.249227
+0.705706 0.660965 0.0708652 -0.245117
+0.660965 0.705706 0.0867449 -0.239957
+0.613395 0.747424 0.102253 -0.23377
+0.563198 0.785942 0.117324 -0.226582
+0.510589 0.821094 0.131891 -0.218423
+0.455793 0.85273 0.145895 -0.20933
+0.399046 0.880714 0.159273 -0.19934
+0.340591 0.904928 0.17197 -0.188496
+0.280676 0.925266 0.18393 -0.176845
+0.21956 0.941642 0.195102 -0.164437
+0.157504 0.953986 0.205439 -0.151324
+0.0947727 0.962244 0.214896 -0.137564
+0.031636 0.966382 0.223433 -0.123215
+-0.0316362 0.966382 0.231013 -0.108337
+-0.0947729 0.962244 0.237604 -0.0929965
+-0.157504 0.953986 0.243178 -0.0772573
+-0.21956 0.941642 0.24771 -0.0611873
+-0.280676 0.925266 0.251182 -0.0448553
+-0.340591 0.904927 0.253577 -0.0283312
+-0.399047 0.880714 0.254887 -0.0116858
+-0.455793 0.85273 0.255106 0.00500961
+-0.510589 0.821094 0.254232 0.0216836
+-0.563198 0.785941 0.25227 0.0382648
+-0.613395 0.747424 0.249227 0.0546821
+-0.660966 0.705706 0.245117 0.0708652
+-0.705706 0.660966 0.239957 0.0867449
+-0.747424 0.613395 0.23377 0.102253
+-0.785942 0.563198 0.226582 0.117323
+-0.821094 0.510589 0.218423 0.131891
+-0.85273 0.455794 0.20933 0.145895
+-0.880714 0.399046 0.19934 0.159273
+-0.904928 0.340591 0.188496 0.17197
+-0.925266 0.280676 0.176845 0.18393
+-0.941642 0.21956 0.164437 0.195102
+-0.953986 0.157504 0.151324 0.205439
+-0.962244 0.0947727 0.137564 0.214896
+-0.966382 0.0316362 0.123215 0.223433
+0.978421 0.0320302 -0.0332509 -0.201398
+0.974231 0.0959534 -0.0200077 -0.203141
+0.96587 0.159466 -0.00667876 -0.204015
+0.953372 0.222295 0.00667876 -0.204015
+0.936792 0.284173 0.0200077 -0.203141
+0.9162 0.344833 0.0332509 -0.201398
+0.891686 0.404017 0.0463517 -0.198792
+0.863352 0.461472 0.0592541 -0.195335
+0.831322 0.516949 0.0719027 -0.191041
+0.795732 0.570214 0.0842435 -0.185929
+0.756735 0.621036 0.0962235 -0.180021
+0.714497 0.669199 0.107791 -0.173343
+0.669199 0.714497 0.118898 -0.165922
+0.621036 0.756735 0.129495 -0.15779
+0.570214 0.795732 0.139538 -0.148983
+0.516949 0.831322 0.148983 -0.139538
+0.461471 0.863352 0.15779 -0.129495
+0.404017 0.891686 0.165922 -0.118898
+0.344833 0.9162 0.173343 -0.107791
+0.284173 0.936792 0.180021 -0.0962234
+0.222295 0.953372 0.185929 -0.0842435
+0.159466 0.96587 0.191041 -0.0719027
+0.0959533 0.974231 0.195335 -0.0592541
+0.0320301 0.978421 0.198792 -0.0463517
+-0.0320303 0.978421 0.201398 -0.0332509
+-0.0959535 0.974231 0.203141 -0.0200076
+-0.159466 0.96587 0.204015 -0.00667874
+-0.222295 0.953372 0.204015 0.00667877
+-0.284173 0.936792 0.203141 0.0200077
+-0.344834 0.9162 0.201398 0.0332509
+-0.404018 0.891686 0.198792 0.0463518
+-0.461471 0.863352 0.195335 0.0592541
+-0.516949 0.831322 0.191041 0.0719027
+-0.570214 0.795732 0.185929 0.0842435
+-0.621036 0.756735 0.180021 0.0962234
+-0.669199 0.714497 0.173343 0.107791
+-0.714497 0.669199 0.165922 0.118898
+-0.756735 0.621036 0.15779 0.129495
+-0.795732 0.570214 0.148983 0.139538
+-0.831322 0.516949 0.139538 0.148983
+-0.863352 0.461472 0.129495 0.15779
+-0.891686 0.404017 0.118898 0.165922
+-0.9162 0.344833 0.107791 0.173343
+-0.936792 0.284173 0.0962235 0.180021
+-0.953372 0.222295 0.0842435 0.185929
+-0.96587 0.159466 0.0719028 0.191041
+-0.974231 0.0959533 0.0592541 0.195335
+-0.978421 0.0320303 0.0463518 0.198792
+0.978421 0.0320302 -0.107791 -0.173343
+0.974231 0.0959534 -0.0962235 -0.180021
+0.96587 0.159466 -0.0842435 -0.185929
+0.953372 0.222295 -0.0719027 -0.191041
+0.936792 0.284173 -0.0592541 -0.195335
+0.9162 0.344833 -0.0463518 -0.198792
+0.891686 0.404017 -0.0332509 -0.201398
+0.863352 0.461472 -0.0200077 -0.203141
+0.831322 0.516949 -0.00667875 -0.204015
+0.795732 0.570214 0.00667875 -0.204015
+0.756735 0.621036 0.0200077 -0.203141
+0.714497 0.669199 0.0332509 -0.201398
+0.669199 0.714497 0.0463518 -0.198792
+0.621036 0.756735 0.0592541 -0.195335
+0.570214 0.795732 0.0719027 -0.191041
+0.516949 0.831322 0.0842435 -0.185929
+0.461471 0.863352 0.0962235 -0.180021
+0.404017 0.891686 0.107791 -0.173343
+0.344833 0.9162 0.118898 -0.165922
+0.284173 0.936792 0.129495 -0.15779
+0.222295 0.953372 0.139538 -0.148983
+0.159466 0.96587 0.148983 -0.139538
+0.0959533 0.974231 0.15779 -0.129495
+0.0320301 0.978421 0.165922 -0.118898
+-0.0320303 0.978421 0.173343 -0.107791
+-0.0959535 0.974231 0.180021 -0.0962234
+-0.159466 0.96587 0.185929 -0.0842435
+-0.222295 0.953372 0.191041 -0.0719027
+-0.284173 0.936792 0.195335 -0.0592541
+-0.344834 0.9162 0.198792 -0.0463517
+-0.404018 0.891686 0.201398 -0.0332509
+-0.461471 0.863352 0.203141 -0.0200077
+-0.516949 0.831322 0.204015 -0.00667876
+-0.570214 0.795732 0.204015 0.00667877
+-0.621036 0.756735 0.203141 0.0200077
+-0.669199 0.714497 0.201398 0.0332509
+-0.714497 0.669199 0.198792 0.0463517
+-0.756735 0.621036 0.195335 0.0592541
+-0.795732 0.570214 0.191041 0.0719027
+-0.831322 0.516949 0.185929 0.0842435
+-0.863352 0.461472 0.180021 0.0962234
+-0.891686 0.404017 0.173343 0.107791
+-0.9162 0.344833 0.165922 0.118898
+-0.936792 0.284173 0.15779 0.129495
+-0.953372 0.222295 0.148983 0.139538
+-0.96587 0.159466 0.139538 0.148983
+-0.974231 0.0959533 0.129495 0.15779
+-0.978421 0.0320303 0.118898 0.165922
+0.987683 0.0323334 -0.0347638 -0.149094
+0.983453 0.0968617 -0.0249382 -0.151048
+0.975013 0.160975 -0.0150057 -0.152356
+0.962397 0.224399 -0.00500906 -0.153011
+0.94566 0.286863 0.00500907 -0.153011
+0.924873 0.348098 0.0150057 -0.152356
+0.900126 0.407842 0.0249382 -0.151048
+0.871525 0.46584 0.0347638 -0.149094
+0.839192 0.521843 0.0444406 -0.146501
+0.803265 0.575611 0.0539271 -0.143281
+0.763898 0.626915 0.0631826 -0.139447
+0.72126 0.675534 0.0721676 -0.135016
+0.675534 0.72126 0.0808436 -0.130007
+0.626915 0.763898 0.0891733 -0.124441
+0.575611 0.803265 0.0971212 -0.118343
+0.521843 0.839192 0.104653 -0.111737
+0.46584 0.871525 0.111737 -0.104653
+0.407842 0.900126 0.118343 -0.0971212
+0.348098 0.924873 0.124441 -0.0891733
+0.286863 0.94566 0.130007 -0.0808435
+0.224399 0.962397 0.135016 -0.0721676
+0.160975 0.975013 0.139447 -0.0631826
+0.0968616 0.983453 0.143281 -0.053927
+0.0323333 0.987683 0.146501 -0.0444406
+-0.0323335 0.987683 0.149094 -0.0347638
+-0.0968618 0.983453 0.151048 -0.0249382
+-0.160975 0.975013 0.152356 -0.0150057
+-0.224399 0.962397 0.153011 -0.00500906
+-0.286863 0.94566 0.153011 0.00500909
+-0.348098 0.924873 0.152356 0.0150058
+-0.407842 0.900126 0.151048 0.0249382
+-0.46584 0.871525 0.149094 0.0347638
+-0.521843 0.839192 0.146501 0.0444406
+-0.575611 0.803265 0.143281 0.0539271
+-0.626915 0.763898 0.139447 0.0631826
+-0.675534 0.72126 0.135016 0.0721676
+-0.72126 0.675534 0.130007 0.0808435
+-0.763898 0.626915 0.124441 0.0891733
+-0.803265 0.575611 0.118343 0.0971212
+-0.839192 0.521843 0.111737 0.104653
+-0.871525 0.46584 0.104653 0.111737
+-0.900126 0.407842 0.0971212 0.118343
+-0.924873 0.348098 0.0891733 0.124441
+-0.94566 0.286863 0.0808436 0.130007
+-0.962397 0.224399 0.0721676 0.135016
+-0.975013 0.160975 0.0631826 0.139447
+-0.983453 0.0968616 0.053927 0.143281
+-0.987683 0.0323335 0.0444406 0.146501
+0.966382 0.0316361 -0.223433 -0.123215
+0.962244 0.0947728 -0.214896 -0.137564
+0.953986 0.157504 -0.205439 -0.151324
+0.941642 0.21956 -0.195102 -0.164437
+0.925266 0.280676 -0.18393 -0.176845
+0.904928 0.340591 -0.17197 -0.188496
+0.880714 0.399046 -0.159273 -0.19934
+0.85273 0.455794 -0.145895 -0.20933
+0.821094 0.510589 -0.131891 -0.218423
+0.785942 0.563198 -0.117324 -0.226582
+0.747424 0.613395 -0.102253 -0.23377
+0.705706 0.660965 -0.0867449 -0.239957
+0.660965 0.705706 -0.0708652 -0.245117
+0.613395 0.747424 -0.0546821 -0.249227
+0.563198 0.785942 -0.0382648 -0.25227
+0.510589 0.821094 -0.0216836 -0.254232
+0.455793 0.85273 -0.00500962 -0.255106
+0.399046 0.880714 0.0116858 -0.254887
+0.340591 0.904928 0.0283312 -0.253577
+0.280676 0.925266 0.0448553 -0.251182
+0.21956 0.941642 0.0611874 -0.24771
+0.157504 0.953986 0.0772574 -0.243178
+0.0947727 0.962244 0.0929966 -0.237604
+0.031636 0.966382 0.108338 -0.231013
+-0.0316362 0.966382 0.123215 -0.223433
+-0.0947729 0.962244 0.137564 -0.214896
+-0.157504 0.953986 0.151324 -0.205439
+-0.21956 0.941642 0.164437 -0.195102
+-0.280676 0.925266 0.176845 -0.18393
+-0.340591 0.904927 0.188496 -0.171969
+-0.399047 0.880714 0.19934 -0.159273
+-0.455793 0.85273 0.20933 -0.145895
+-0.510589 0.821094 0.218423 -0.131891
+-0.563198 0.785941 0.226582 -0.117323
+-0.613395 0.747424 0.23377 -0.102253
+-0.660966 0.705706 0.239957 -0.0867449
+-0.705706 0.660966 0.245117 -0.0708652
+-0.747424 0.613395 0.249227 -0.0546821
+-0.785942 0.563198 0.25227 -0.0382648
+-0.821094 0.510589 0.254232 -0.0216836
+-0.85273 0.455794 0.255106 -0.00500967
+-0.880714 0.399046 0.254887 0.0116858
+-0.904928 0.340591 0.253577 0.0283313
+-0.925266 0.280676 0.251182 0.0448553
+-0.941642 0.21956 0.24771 0.0611874
+-0.953986 0.157504 0.243178 0.0772573
+-0.962244 0.0947727 0.237604 0.0929965
+-0.966382 0.0316362 0.231013 0.108337
+0.978421 0.0320302 -0.165922 -0.118898
+0.974231 0.0959534 -0.15779 -0.129495
+0.96587 0.159466 -0.148983 -0.139538
+0.953372 0.222295 -0.139538 -0.148983
+0.936792 0.284173 -0.129495 -0.15779
+0.9162 0.344833 -0.118898 -0.165922
+0.891686 0.404017 -0.107791 -0.173343
+0.863352 0.461472 -0.0962235 -0.180021
+0.831322 0.516949 -0.0842435 -0.185929
+0.795732 0.570214 -0.0719027 -0.191041
+0.756735 0.621036 -0.0592541 -0.195335
+0.714497 0.669199 -0.0463517 -0.198792
+0.669199 0.714497 -0.0332509 -0.201398
+0.621036 0.756735 -0.0200077 -0.203141
+0.570214 0.795732 -0.00667874 -0.204015
+0.516949 0.831322 0.00667877 -0.204015
+0.461471 0.863352 0.0200077 -0.203141
+0.404017 0.891686 0.0332509 -0.201398
+0.344833 0.9162 0.0463518 -0.198792
+0.284173 0.936792 0.0592541 -0.195335
+0.222295 0.953372 0.0719027 -0.191041
+0.159466 0.96587 0.0842435 -0.185929
+0.0959533 0.974231 0.0962235 -0.180021
+0.0320301 0.978421 0.107791 -0.173343
+-0.0320303 0.978421 0.118898 -0.165922
+-0.0959535 0.974231 0.129495 -0.15779
+-0.159466 0.96587 0.139538 -0.148983
+-0.222295 0.953372 0.148983 -0.139538
+-0.284173 0.936792 0.15779 -0.129495
+-0.344834 0.9162 0.165922 -0.118898
+-0.404018 0.891686 0.173343 -0.107791
+-0.461471 0.863352 0.180021 -0.0962235
+-0.516949 0.831322 0.185929 -0.0842435
+-0.570214 0.795732 0.191041 -0.0719027
+-0.621036 0.756735 0.195335 -0.0592541
+-0.669199 0.714497 0.198792 -0.0463517
+-0.714497 0.669199 0.201398 -0.0332509
+-0.756735 0.621036 0.203141 -0.0200077
+-0.795732 0.570214 0.204015 -0.00667877
+-0.831322 0.516949 0.204015 0.00667876
+-0.863352 0.461472 0.203141 0.0200076
+-0.891686 0.404017 0.201398 0.0332509
+-0.9162 0.344833 0.198792 0.0463518
+-0.936792 0.284173 0.195335 0.0592541
+-0.953372 0.222295 0.191041 0.0719028
+-0.96587 0.159466 0.185929 0.0842435
+-0.974231 0.0959533 0.180021 0.0962235
+-0.978421 0.0320303 0.173343 0.107791
+0.978421 0.0320302 -0.198792 -0.0463517
+0.974231 0.0959534 -0.195335 -0.0592541
+0.96587 0.159466 -0.191041 -0.0719027
+0.953372 0.222295 -0.185929 -0.0842435
+0.936792 0.284173 -0.180021 -0.0962235
+0.9162 0.344833 -0.173343 -0.107791
+0.891686 0.404017 -0.165922 -0.118898
+0.863352 0.461472 -0.15779 -0.129495
+0.831322 0.516949 -0.148983 -0.139538
+0.795732 0.570214 -0.139538 -0.148983
+0.756735 0.621036 -0.129495 -0.15779
+0.714497 0.669199 -0.118898 -0.165922
+0.669199 0.714497 -0.107791 -0.173343
+0.621036 0.756735 -0.0962235 -0.180021
+0.570214 0.795732 -0.0842435 -0.185929
+0.516949 0.831322 -0.0719027 -0.191041
+0.461471 0.863352 -0.0592541 -0.195335
+0.404017 0.891686 -0.0463517 -0.198792
+0.344833 0.9162 -0.0332509 -0.201398
+0.284173 0.936792 -0.0200077 -0.203141
+0.222295 0.953372 -0.00667875 -0.204015
+0.159466 0.96587 0.00667878 -0.204015
+0.0959533 0.974231 0.0200077 -0.203141
+0.0320301 0.978421 0.0332509 -0.201398
+-0.0320303 0.978421 0.0463518 -0.198792
+-0.0959535 0.974231 0.0592541 -0.195335
+-0.159466 0.96587 0.0719028 -0.191041
+-0.222295 0.953372 0.0842435 -0.185929
+-0.284173 0.936792 0.0962235 -0.180021
+-0.344834 0.9162 0.107791 -0.173343
+-0.404018 0.891686 0.118898 -0.165922
+-0.461471 0.863352 0.129495 -0.15779
+-0.516949 0.831322 0.139538 -0.148983
+-0.570214 0.795732 0.148983 -0.139538
+-0.621036 0.756735 0.15779 -0.129495
+-0.669199 0.714497 0.165922 -0.118898
+-0.714497 0.669199 0.173343 -0.107791
+-0.756735 0.621036 0.180021 -0.0962234
+-0.795732 0.570214 0.185929 -0.0842435
+-0.831322 0.516949 0.191041 -0.0719027
+-0.863352 0.461472 0.195335 -0.0592541
+-0.891686 0.404017 0.198792 -0.0463517
+-0.9162 0.344833 0.201398 -0.0332509
+-0.936792 0.284173 0.203141 -0.0200077
+-0.953372 0.222295 0.204015 -0.00667874
+-0.96587 0.159466 0.204015 0.00667874
+-0.974231 0.0959533 0.203141 0.0200077
+-0.978421 0.0320303 0.201398 0.0332509
+0.987683 0.0323334 -0.146501 -0.0444406
+0.983453 0.0968617 -0.143281 -0.0539271
+0.975013 0.160975 -0.139447 -0.0631826
+0.962397 0.224399 -0.135016 -0.0721676
+0.94566 0.286863 -0.130007 -0.0808435
+0.924873 0.348098 -0.124441 -0.0891733
+0.900126 0.407842 -0.118343 -0.0971212
+0.871525 0.46584 -0.111737 -0.104653
+0.839192 0.521843 -0.104653 -0.111737
+0.803265 0.575611 -0.0971212 -0.118343
+0.763898 0.626915 -0.0891733 -0.124441
+0.72126 0.675534 -0.0808435 -0.130007
+0.675534 0.72126 -0.0721676 -0.135016
+0.626915 0.763898 -0.0631826 -0.139447
+0.575611 0.803265 -0.053927 -0.143281
+0.521843 0.839192 -0.0444406 -0.146501
+0.46584 0.871525 -0.0347638 -0.149094
+0.407842 0.900126 -0.0249382 -0.151048
+0.348098 0.924873 -0.0150057 -0.152356
+0.286863 0.94566 -0.00500906 -0.153011
+0.224399 0.962397 0.00500907 -0.153011
+0.160975 0.975013 0.0150058 -0.152356
+0.0968616 0.983453 0.0249382 -0.151048
+0.0323333 0.987683 0.0347638 -0.149094
+-0.0323335 0.987683 0.0444406 -0.146501
+-0.0968618 0.983453 0.0539271 -0.143281
+-0.160975 0.975013 0.0631826 -0.139447
+-0.224399 0.962397 0.0721676 -0.135016
+-0.286863 0.94566 0.0808436 -0.130007
+-0.348098 0.924873 0.0891733 -0.124441
+-0.407842 0.900126 0.0971213 -0.118343
+-0.46584 0.871525 0.104653 -0.111737
+-0.521843 0.839192 0.111737 -0.104653
+-0.575611 0.803265 0.118343 -0.0971212
+-0.626915 0.763898 0.124441 -0.0891733
+-0.675534 0.72126 0.130007 -0.0808435
+-0.72126 0.675534 0.135016 -0.0721676
+-0.763898 0.626915 0.139447 -0.0631826
+-0.803265 0.575611 0.143281 -0.0539271
+-0.839192 0.521843 0.146501 -0.0444406
+-0.871525 0.46584 0.149094 -0.0347638
+-0.900126 0.407842 0.151048 -0.0249382
+-0.924873 0.348098 0.152356 -0.0150057
+-0.94566 0.286863 0.153011 -0.00500907
+-0.962397 0.224399 0.153011 0.00500908
+-0.975013 0.160975 0.152356 0.0150057
+-0.983453 0.0968616 0.151048 0.0249382
+-0.987683 0.0323335 0.149094 0.0347638
+0.987683 0.0323334 -0.104653 -0.111737
+0.983453 0.0968617 -0.0971212 -0.118343
+0.975013 0.160975 -0.0891733 -0.124441
+0.962397 0.224399 -0.0808435 -0.130007
+0.94566 0.286863 -0.0721676 -0.135016
+0.924873 0.348098 -0.0631826 -0.139447
+0.900126 0.407842 -0.0539271 -0.143281
+0.871525 0.46584 -0.0444406 -0.146501
+0.839192 0.521843 -0.0347638 -0.149094
+0.803265 0.575611 -0.0249382 -0.151048
+0.763898 0.626915 -0.0150058 -0.152356
+0.72126 0.675534 -0.00500906 -0.153011
+0.675534 0.72126 0.00500907 -0.153011
+0.626915 0.763898 0.0150057 -0.152356
+0.575611 0.803265 0.0249382 -0.151048
+0.521843 0.839192 0.0347638 -0.149094
+0.46584 0.871525 0.0444406 -0.146501
+0.407842 0.900126 0.0539271 -0.143281
+0.348098 0.924873 0.0631826 -0.139447
+0.286863 0.94566 0.0721676 -0.135016
+0.224399 0.962397 0.0808436 -0.130007
+0.160975 0.975013 0.0891733 -0.124441
+0.0968616 0.983453 0.0971213 -0.118343
+0.0323333 0.987683 0.104653 -0.111737
+-0.0323335 0.987683 0.111737 -0.104653
+-0.0968618 0.983453 0.118343 -0.0971212
+-0.160975 0.975013 0.124441 -0.0891733
+-0.224399 0.962397 0.130007 -0.0808435
+-0.286863 0.94566 0.135016 -0.0721676
+-0.348098 0.924873 0.139447 -0.0631826
+-0.407842 0.900126 0.143281 -0.053927
+-0.46584 0.871525 0.146501 -0.0444406
+-0.521843 0.839192 0.149094 -0.0347638
+-0.575611 0.803265 0.151048 -0.0249382
+-0.626915 0.763898 0.152356 -0.0150058
+-0.675534 0.72126 0.153011 -0.00500906
+-0.72126 0.675534 0.153011 0.00500906
+-0.763898 0.626915 0.152356 0.0150058
+-0.803265 0.575611 0.151048 0.0249382
+-0.839192 0.521843 0.149094 0.0347638
+-0.871525 0.46584 0.146501 0.0444406
+-0.900126 0.407842 0.143281 0.0539271
+-0.924873 0.348098 0.139447 0.0631826
+-0.94566 0.286863 0.135016 0.0721676
+-0.962397 0.224399 0.130007 0.0808436
+-0.975013 0.160975 0.124441 0.0891733
+-0.983453 0.0968616 0.118343 0.0971212
+-0.987683 0.0323335 0.111737 0.104653
+0.994245 0.0325482 -0.0359514 -0.0955205
+0.989988 0.0975053 -0.0296271 -0.0976673
+0.981491 0.162045 -0.0231759 -0.0993959
+0.968791 0.22589 -0.0166254 -0.100699
+0.951943 0.288769 -0.0100038 -0.101571
+0.931019 0.350411 -0.00333938 -0.102007
+0.906107 0.410552 0.00333938 -0.102007
+0.877316 0.468935 0.0100038 -0.101571
+0.844768 0.52531 0.0166255 -0.100699
+0.808602 0.579436 0.0231759 -0.0993959
+0.768974 0.63108 0.0296271 -0.0976673
+0.726053 0.680023 0.0359514 -0.0955205
+0.680023 0.726053 0.0421217 -0.0929646
+0.631081 0.768974 0.0481117 -0.0900107
+0.579436 0.808602 0.0538957 -0.0866713
+0.52531 0.844768 0.0594489 -0.0829608
+0.468935 0.877316 0.0647475 -0.078895
+0.410552 0.906107 0.0697689 -0.0744914
+0.350411 0.931019 0.0744914 -0.0697688
+0.288769 0.951943 0.0788951 -0.0647475
+0.22589 0.968791 0.0829608 -0.0594489
+0.162045 0.981491 0.0866713 -0.0538957
+0.0975052 0.989988 0.0900107 -0.0481117
+0.0325481 0.994245 0.0929647 -0.0421217
+-0.0325483 0.994245 0.0955205 -0.0359514
+-0.0975054 0.989988 0.0976673 -0.029627
+-0.162045 0.981491 0.0993959 -0.0231759
+-0.225891 0.968791 0.100699 -0.0166254
+-0.288769 0.951943 0.101571 -0.0100038
+-0.350411 0.931019 0.102007 -0.00333936
+-0.410552 0.906107 0.102007 0.00333939
+-0.468935 0.877316 0.101571 0.0100038
+-0.52531 0.844768 0.100699 0.0166254
+-0.579436 0.808602 0.0993959 0.0231759
+-0.63108 0.768974 0.0976673 0.0296271
+-0.680023 0.726053 0.0955205 0.0359514
+-0.726053 0.680023 0.0929647 0.0421217
+-0.768974 0.63108 0.0900107 0.0481117
+-0.808602 0.579436 0.0866713 0.0538957
+-0.844768 0.52531 0.0829608 0.0594489
+-0.877316 0.468935 0.0788951 0.0647475
+-0.906107 0.410552 0.0744914 0.0697688
+-0.931019 0.350411 0.0697688 0.0744914
+-0.951943 0.288769 0.0647475 0.078895
+-0.968791 0.22589 0.0594489 0.0829608
+-0.981491 0.162045 0.0538957 0.0866713
+-0.989988 0.0975053 0.0481117 0.0900107
+-0.994245 0.0325483 0.0421217 0.0929646
+0.994245 0.0325482 -0.0929646 -0.0421217
+0.989988 0.0975053 -0.0900107 -0.0481117
+0.981491 0.162045 -0.0866713 -0.0538957
+0.968791 0.22589 -0.0829608 -0.0594489
+0.951943 0.288769 -0.078895 -0.0647475
+0.931019 0.350411 -0.0744914 -0.0697688
+0.906107 0.410552 -0.0697688 -0.0744914
+0.877316 0.468935 -0.0647475 -0.0788951
+0.844768 0.52531 -0.0594489 -0.0829608
+0.808602 0.579436 -0.0538957 -0.0866713
+0.768974 0.63108 -0.0481117 -0.0900107
+0.726053 0.680023 -0.0421217 -0.0929647
+0.680023 0.726053 -0.0359514 -0.0955205
+0.631081 0.768974 -0.0296271 -0.0976673
+0.579436 0.808602 -0.0231759 -0.0993959
+0.52531 0.844768 -0.0166254 -0.100699
+0.468935 0.877316 -0.0100038 -0.101571
+0.410552 0.906107 -0.00333937 -0.102007
+0.350411 0.931019 0.00333938 -0.102007
+0.288769 0.951943 0.0100038 -0.101571
+0.22589 0.968791 0.0166255 -0.100699
+0.162045 0.981491 0.0231759 -0.0993959
+0.0975052 0.989988 0.0296271 -0.0976673
+0.0325481 0.994245 0.0359514 -0.0955205
+-0.0325483 0.994245 0.0421217 -0.0929646
+-0.0975054 0.989988 0.0481117 -0.0900107
+-0.162045 0.981491 0.0538957 -0.0866713
+-0.225891 0.968791 0.0594489 -0.0829608
+-0.288769 0.951943 0.0647475 -0.078895
+-0.350411 0.931019 0.0697689 -0.0744914
+-0.410552 0.906107 0.0744914 -0.0697688
+-0.468935 0.877316 0.078895 -0.0647475
+-0.52531 0.844768 0.0829608 -0.0594489
+-0.579436 0.808602 0.0866713 -0.0538957
+-0.63108 0.768974 0.0900107 -0.0481117
+-0.680023 0.726053 0.0929647 -0.0421217
+-0.726053 0.680023 0.0955205 -0.0359514
+-0.768974 0.63108 0.0976673 -0.0296271
+-0.808602 0.579436 0.0993959 -0.0231759
+-0.844768 0.52531 0.100699 -0.0166254
+-0.877316 0.468935 0.101571 -0.0100038
+-0.906107 0.410552 0.102007 -0.00333938
+-0.931019 0.350411 0.102007 0.00333939
+-0.951943 0.288769 0.101571 0.0100038
+-0.968791 0.22589 0.100699 0.0166255
+-0.981491 0.162045 0.0993959 0.0231759
+-0.989988 0.0975053 0.0976673 0.0296271
+-0.994245 0.0325483 0.0955205 0.0359514
+0.998162 0.0326765 -0.0348844 -0.0372457
+0.993888 0.0978894 -0.0323737 -0.0394475
+0.985358 0.162683 -0.0297244 -0.0414804
+0.972608 0.22678 -0.0269478 -0.0433357
+0.955694 0.289906 -0.0240559 -0.0450054
+0.934687 0.351791 -0.0210609 -0.0464823
+0.909677 0.412169 -0.0179757 -0.0477602
+0.880772 0.470783 -0.0148135 -0.0488337
+0.848096 0.52738 -0.0115879 -0.049698
+0.811788 0.581719 -0.00831273 -0.0503494
+0.772003 0.633567 -0.00500192 -0.0507853
+0.728913 0.682702 -0.00166969 -0.0510037
+0.682702 0.728913 0.00166969 -0.0510037
+0.633567 0.772003 0.00500192 -0.0507853
+0.581719 0.811788 0.00831273 -0.0503494
+0.52738 0.848096 0.0115879 -0.049698
+0.470782 0.880772 0.0148135 -0.0488337
+0.412169 0.909677 0.0179757 -0.0477602
+0.351791 0.934687 0.0210609 -0.0464823
+0.289906 0.955694 0.0240559 -0.0450054
+0.22678 0.972608 0.0269479 -0.0433357
+0.162683 0.985358 0.0297244 -0.0414804
+0.0978893 0.993888 0.0323738 -0.0394475
+0.0326763 0.998162 0.0348844 -0.0372457
+-0.0326765 0.998162 0.0372457 -0.0348844
+-0.0978895 0.993888 0.0394475 -0.0323737
+-0.162683 0.985358 0.0414804 -0.0297244
+-0.22678 0.972608 0.0433357 -0.0269478
+-0.289907 0.955693 0.0450054 -0.0240559
+-0.351791 0.934686 0.0464823 -0.0210609
+-0.412169 0.909677 0.0477603 -0.0179757
+-0.470782 0.880772 0.0488337 -0.0148135
+-0.52738 0.848096 0.049698 -0.0115879
+-0.581719 0.811788 0.0503494 -0.00831272
+-0.633567 0.772003 0.0507853 -0.00500192
+-0.682702 0.728913 0.0510037 -0.00166969
+-0.728913 0.682702 0.0510037 0.00166969
+-0.772003 0.633567 0.0507853 0.00500192
+-0.811788 0.581719 0.0503494 0.00831272
+-0.848096 0.52738 0.049698 0.0115879
+-0.880772 0.470783 0.0488337 0.0148135
+-0.909677 0.412169 0.0477602 0.0179757
+-0.934687 0.351791 0.0464823 0.0210609
+-0.955693 0.289906 0.0450054 0.0240559
+-0.972608 0.22678 0.0433357 0.0269479
+-0.985358 0.162683 0.0414804 0.0297244
+-0.993888 0.0978894 0.0394475 0.0323737
+-0.998162 0.0326765 0.0372457 0.0348844
+0.735586 0.0240806 0.49412 -0.462794
+0.732436 0.0721387 0.523331 -0.429486
+0.72615 0.119888 0.5503 -0.394339
+0.716754 0.167124 0.574913 -0.357504
+0.704289 0.213644 0.597064 -0.319137
+0.688808 0.259249 0.616658 -0.279404
+0.670378 0.303744 0.633611 -0.238474
+0.649076 0.346939 0.647852 -0.196524
+0.624996 0.388647 0.659318 -0.153731
+0.598239 0.428692 0.667961 -0.110281
+0.56892 0.466901 0.673743 -0.0663579
+0.537165 0.50311 0.676641 -0.0221509
+0.50311 0.537165 0.676641 0.0221509
+0.466901 0.56892 0.673743 0.0663579
+0.428692 0.598239 0.667961 0.110281
+0.388647 0.624996 0.659318 0.153731
+0.346939 0.649077 0.647852 0.196524
+0.303744 0.670378 0.633611 0.238474
+0.259249 0.688808 0.616658 0.279404
+0.213644 0.704289 0.597064 0.319137
+0.167124 0.716754 0.574913 0.357504
+0.119888 0.72615 0.5503 0.394339
+0.0721386 0.732436 0.52333 0.429486
+0.0240805 0.735586 0.49412 0.462794
+-0.0240807 0.735586 0.462794 0.49412
+-0.0721387 0.732436 0.429486 0.523331
+-0.119888 0.72615 0.394339 0.5503
+-0.167124 0.716754 0.357504 0.574913
+-0.213644 0.704289 0.319137 0.597064
+-0.259249 0.688808 0.279404 0.616658
+-0.303744 0.670378 0.238474 0.633611
+-0.346939 0.649077 0.196524 0.647852
+-0.388647 0.624996 0.153731 0.659318
+-0.428692 0.598239 0.110281 0.667961
+-0.466901 0.56892 0.0663579 0.673743
+-0.50311 0.537165 0.0221509 0.676641
+-0.537165 0.50311 -0.0221509 0.676641
+-0.56892 0.466901 -0.0663579 0.673743
+-0.598239 0.428692 -0.110281 0.667961
+-0.624996 0.388647 -0.153731 0.659318
+-0.649076 0.346939 -0.196524 0.647852
+-0.670378 0.303744 -0.238474 0.633611
+-0.688808 0.259249 -0.279404 0.616658
+-0.704289 0.213644 -0.319137 0.597064
+-0.716754 0.167124 -0.357504 0.574913
+-0.72615 0.119888 -0.394339 0.5503
+-0.732436 0.0721386 -0.429486 0.523331
+-0.735586 0.0240807 -0.462794 0.49412
+0.763354 0.0249896 0.512107 -0.392954
+0.760085 0.0748618 0.536711 -0.358619
+0.753561 0.124413 0.559017 -0.322749
+0.743811 0.173432 0.578929 -0.285496
+0.730875 0.221709 0.596362 -0.247021
+0.71481 0.269035 0.611241 -0.207488
+0.695684 0.31521 0.623502 -0.167067
+0.673578 0.360035 0.633094 -0.12593
+0.648589 0.403318 0.639975 -0.0842543
+0.620822 0.444875 0.644115 -0.0422175
+0.590396 0.484526 0.645497 -1.17023e-08
+0.557443 0.522102 0.644115 0.0422176
+0.522102 0.557443 0.639975 0.0842543
+0.484526 0.590396 0.633094 0.12593
+0.444875 0.620822 0.623502 0.167067
+0.403318 0.648589 0.611241 0.207488
+0.360035 0.673579 0.596362 0.247021
+0.31521 0.695684 0.578929 0.285496
+0.269035 0.71481 0.559017 0.322749
+0.221709 0.730875 0.536711 0.358619
+0.173432 0.743811 0.512107 0.392954
+0.124413 0.753561 0.48531 0.425606
+0.0748617 0.760085 0.456435 0.456436
+0.0249895 0.763354 0.425606 0.485311
+-0.0249897 0.763354 0.392954 0.512107
+-0.0748619 0.760085 0.358619 0.536711
+-0.124414 0.753561 0.322749 0.559017
+-0.173432 0.743811 0.285496 0.578929
+-0.221709 0.730875 0.247021 0.596362
+-0.269036 0.71481 0.207488 0.611241
+-0.31521 0.695684 0.167067 0.623502
+-0.360035 0.673579 0.12593 0.633094
+-0.403318 0.648589 0.0842543 0.639975
+-0.444875 0.620822 0.0422175 0.644115
+-0.484526 0.590397 2.19614e-08 0.645497
+-0.522102 0.557443 -0.0422176 0.644115
+-0.557443 0.522102 -0.0842543 0.639975
+-0.590397 0.484526 -0.12593 0.633094
+-0.620822 0.444875 -0.167067 0.623502
+-0.648589 0.403318 -0.207488 0.611241
+-0.673578 0.360035 -0.247021 0.596362
+-0.695684 0.31521 -0.285496 0.578929
+-0.71481 0.269035 -0.322749 0.559017
+-0.730875 0.221709 -0.358619 0.536711
+-0.743811 0.173432 -0.392954 0.512107
+-0.753561 0.124414 -0.425606 0.485311
+-0.760085 0.0748618 -0.456435 0.456435
+-0.763354 0.0249897 -0.48531 0.425606
+0.763354 0.0249896 0.425606 -0.485311
+0.760085 0.0748618 0.456435 -0.456435
+0.753561 0.124413 0.485311 -0.425606
+0.743811 0.173432 0.512107 -0.392954
+0.730875 0.221709 0.536711 -0.358619
+0.71481 0.269035 0.559017 -0.322749
+0.695684 0.31521 0.578929 -0.285496
+0.673578 0.360035 0.596362 -0.247021
+0.648589 0.403318 0.611241 -0.207488
+0.620822 0.444875 0.623502 -0.167067
+0.590396 0.484526 0.633094 -0.12593
+0.557443 0.522102 0.639975 -0.0842543
+0.522102 0.557443 0.644115 -0.0422175
+0.484526 0.590396 0.645497 1.44328e-09
+0.444875 0.620822 0.644115 0.0422176
+0.403318 0.648589 0.639975 0.0842544
+0.360035 0.673579 0.633094 0.12593
+0.31521 0.695684 0.623502 0.167067
+0.269035 0.71481 0.611241 0.207488
+0.221709 0.730875 0.596362 0.247021
+0.173432 0.743811 0.578929 0.285496
+0.124413 0.753561 0.559017 0.322749
+0.0748617 0.760085 0.536711 0.358619
+0.0249895 0.763354 0.512107 0.392954
+-0.0249897 0.763354 0.48531 0.425606
+-0.0748619 0.760085 0.456435 0.456436
+-0.124414 0.753561 0.425606 0.485311
+-0.173432 0.743811 0.392954 0.512107
+-0.221709 0.730875 0.358619 0.536711
+-0.269036 0.71481 0.322749 0.559017
+-0.31521 0.695684 0.285496 0.578929
+-0.360035 0.673579 0.247021 0.596362
+-0.403318 0.648589 0.207488 0.611241
+-0.444875 0.620822 0.167067 0.623502
+-0.484526 0.590397 0.12593 0.633094
+-0.522102 0.557443 0.0842542 0.639975
+-0.557443 0.522102 0.0422176 0.644115
+-0.590397 0.484526 -2.96589e-08 0.645497
+-0.620822 0.444875 -0.0422175 0.644115
+-0.648589 0.403318 -0.0842543 0.639975
+-0.673578 0.360035 -0.12593 0.633094
+-0.695684 0.31521 -0.167067 0.623502
+-0.71481 0.269035 -0.207488 0.611241
+-0.730875 0.221709 -0.247021 0.596362
+-0.743811 0.173432 -0.285496 0.578929
+-0.753561 0.124414 -0.322749 0.559017
+-0.760085 0.0748618 -0.358619 0.536711
+-0.763354 0.0249897 -0.392954 0.512107
+0.790146 0.0258667 0.446949 -0.418613
+0.786763 0.0774894 0.47337 -0.388485
+0.78001 0.12878 0.497765 -0.356693
+0.769917 0.17952 0.520028 -0.323374
+0.756528 0.22949 0.540064 -0.28867
+0.739899 0.278478 0.557788 -0.25273
+0.720101 0.326274 0.573123 -0.215708
+0.69722 0.372672 0.586004 -0.177762
+0.671353 0.417474 0.596375 -0.139055
+0.642612 0.460489 0.604193 -0.0997527
+0.611118 0.501532 0.609424 -0.060023
+0.577008 0.540427 0.612045 -0.0200362
+0.540427 0.577008 0.612045 0.0200363
+0.501532 0.611118 0.609424 0.060023
+0.460489 0.642612 0.604193 0.0997527
+0.417474 0.671353 0.596375 0.139055
+0.372672 0.69722 0.586004 0.177762
+0.326274 0.720101 0.573123 0.215708
+0.278478 0.739899 0.557788 0.25273
+0.22949 0.756528 0.540064 0.28867
+0.17952 0.769917 0.520028 0.323374
+0.12878 0.78001 0.497765 0.356693
+0.0774893 0.786763 0.47337 0.388485
+0.0258666 0.790146 0.446949 0.418613
+-0.0258668 0.790146 0.418613 0.446949
+-0.0774894 0.786763 0.388485 0.47337
+-0.12878 0.78001 0.356693 0.497765
+-0.17952 0.769917 0.323374 0.520028
+-0.22949 0.756528 0.28867 0.540064
+-0.278478 0.739899 0.25273 0.557788
+-0.326274 0.720101 0.215708 0.573123
+-0.372672 0.69722 0.177762 0.586004
+-0.417474 0.671353 0.139055 0.596375
+-0.460489 0.642612 0.0997526 0.604193
+-0.501532 0.611118 0.060023 0.609424
+-0.540427 0.577008 0.0200362 0.612045
+-0.577008 0.540427 -0.0200362 0.612045
+-0.611118 0.501532 -0.060023 0.609424
+-0.642612 0.460489 -0.0997526 0.604193
+-0.671353 0.417474 -0.139055 0.596375
+-0.69722 0.372672 -0.177762 0.586004
+-0.720101 0.326274 -0.215708 0.573123
+-0.739899 0.278478 -0.25273 0.557788
+-0.756528 0.22949 -0.28867 0.540064
+-0.769917 0.179519 -0.323374 0.520028
+-0.78001 0.12878 -0.356693 0.497765
+-0.786763 0.0774893 -0.388485 0.47337
+-0.790146 0.0258668 -0.418613 0.446949
+0.790146 0.0258667 0.520028 -0.323374
+0.786763 0.0774894 0.540064 -0.28867
+0.78001 0.12878 0.557788 -0.25273
+0.769917 0.17952 0.573123 -0.215708
+0.756528 0.22949 0.586004 -0.177762
+0.739899 0.278478 0.596375 -0.139055
+0.720101 0.326274 0.604193 -0.0997527
+0.69722 0.372672 0.609424 -0.060023
+0.671353 0.417474 0.612045 -0.0200363
+0.642612 0.460489 0.612045 0.0200363
+0.611118 0.501532 0.609424 0.060023
+0.577008 0.540427 0.604193 0.0997527
+0.540427 0.577008 0.596375 0.139055
+0.501532 0.611118 0.586004 0.177762
+0.460489 0.642612 0.573123 0.215708
+0.417474 0.671353 0.557788 0.25273
+0.372672 0.69722 0.540064 0.28867
+0.326274 0.720101 0.520028 0.323374
+0.278478 0.739899 0.497765 0.356693
+0.22949 0.756528 0.47337 0.388485
+0.17952 0.769917 0.446949 0.418613
+0.12878 0.78001 0.418613 0.446949
+0.0774893 0.786763 0.388485 0.47337
+0.0258666 0.790146 0.356693 0.497765
+-0.0258668 0.790146 0.323374 0.520028
+-0.0774894 0.786763 0.28867 0.540064
+-0.12878 0.78001 0.25273 0.557788
+-0.17952 0.769917 0.215708 0.573123
+-0.22949 0.756528 0.177762 0.586004
+-0.278478 0.739899 0.139055 0.596375
+-0.326274 0.720101 0.0997526 0.604193
+-0.372672 0.69722 0.0600231 0.609424
+-0.417474 0.671353 0.0200363 0.612045
+-0.460489 0.642612 -0.0200363 0.612045
+-0.501532 0.611118 -0.060023 0.609424
+-0.540427 0.577008 -0.0997527 0.604193
+-0.577008 0.540427 -0.139055 0.596375
+-0.611118 0.501532 -0.177762 0.586004
+-0.642612 0.460489 -0.215708 0.573123
+-0.671353 0.417474 -0.25273 0.557788
+-0.69722 0.372672 -0.28867 0.540064
+-0.720101 0.326274 -0.323374 0.520028
+-0.739899 0.278478 -0.356693 0.497765
+-0.756528 0.22949 -0.388485 0.47337
+-0.769917 0.179519 -0.418613 0.446949
+-0.78001 0.12878 -0.446949 0.418613
+-0.786763 0.0774893 -0.47337 0.388485
+-0.790146 0.0258668 -0.497765 0.356693
+0.816059 0.026715 0.51781 -0.255355
+0.812565 0.0800307 0.533402 -0.220942
+0.805591 0.133004 0.54671 -0.185583
+0.795167 0.185407 0.557678 -0.149429
+0.781339 0.237016 0.566257 -0.112635
+0.764164 0.287611 0.572411 -0.0753593
+0.743717 0.336974 0.576114 -0.0377605
+0.720086 0.384894 0.57735 1.21881e-08
+0.693371 0.431166 0.576114 0.0377605
+0.663687 0.475591 0.572411 0.0753593
+0.63116 0.51798 0.566257 0.112635
+0.595932 0.558151 0.557678 0.149429
+0.558151 0.595932 0.54671 0.185583
+0.51798 0.63116 0.533402 0.220942
+0.475591 0.663687 0.51781 0.255356
+0.431166 0.693371 0.5 0.288675
+0.384894 0.720086 0.480049 0.320759
+0.336974 0.743717 0.458043 0.351469
+0.287611 0.764164 0.434075 0.380674
+0.237016 0.781339 0.408248 0.408248
+0.185407 0.795167 0.380673 0.434075
+0.133003 0.805591 0.351469 0.458043
+0.0800306 0.812565 0.320759 0.480049
+0.0267149 0.816059 0.288675 0.5
+-0.0267151 0.816059 0.255355 0.51781
+-0.0800307 0.812565 0.220942 0.533402
+-0.133004 0.805591 0.185583 0.54671
+-0.185407 0.795167 0.149429 0.557678
+-0.237017 0.781338 0.112635 0.566257
+-0.287611 0.764164 0.0753592 0.572411
+-0.336974 0.743717 0.0377604 0.576114
+-0.384894 0.720086 6.58134e-08 0.57735
+-0.431166 0.693371 -0.0377605 0.576114
+-0.475591 0.663686 -0.0753594 0.572411
+-0.51798 0.63116 -0.112635 0.566257
+-0.558151 0.595931 -0.149429 0.557678
+-0.595931 0.558151 -0.185583 0.54671
+-0.63116 0.51798 -0.220942 0.533402
+-0.663686 0.475591 -0.255355 0.51781
+-0.693371 0.431166 -0.288675 0.5
+-0.720086 0.384894 -0.320759 0.480049
+-0.743717 0.336974 -0.351469 0.458043
+-0.764164 0.287611 -0.380674 0.434075
+-0.781339 0.237016 -0.408248 0.408248
+-0.795167 0.185407 -0.434075 0.380673
+-0.805591 0.133004 -0.458043 0.351469
+-0.812565 0.0800306 -0.480049 0.320759
+-0.816059 0.0267151 -0.5 0.288675
+0.816059 0.026715 0.458043 -0.351469
+0.812565 0.0800307 0.480049 -0.320759
+0.805591 0.133004 0.5 -0.288675
+0.795167 0.185407 0.51781 -0.255355
+0.781339 0.237016 0.533402 -0.220942
+0.764164 0.287611 0.54671 -0.185583
+0.743717 0.336974 0.557678 -0.149429
+0.720086 0.384894 0.566257 -0.112635
+0.693371 0.431166 0.572411 -0.0753593
+0.663687 0.475591 0.576114 -0.0377605
+0.63116 0.51798 0.57735 -1.04669e-08
+0.595932 0.558151 0.576114 0.0377605
+0.558151 0.595932 0.572411 0.0753593
+0.51798 0.63116 0.566257 0.112635
+0.475591 0.663687 0.557678 0.149429
+0.431166 0.693371 0.54671 0.185583
+0.384894 0.720086 0.533402 0.220942
+0.336974 0.743717 0.51781 0.255356
+0.287611 0.764164 0.5 0.288675
+0.237016 0.781339 0.480049 0.320759
+0.185407 0.795167 0.458043 0.351469
+0.133003 0.805591 0.434075 0.380674
+0.0800306 0.812565 0.408248 0.408248
+0.0267149 0.816059 0.380673 0.434075
+-0.0267151 0.816059 0.351469 0.458043
+-0.0800307 0.812565 0.320759 0.480049
+-0.133004 0.805591 0.288675 0.5
+-0.185407 0.795167 0.255355 0.51781
+-0.237017 0.781338 0.220942 0.533402
+-0.287611 0.764164 0.185583 0.54671
+-0.336974 0.743717 0.149429 0.557678
+-0.384894 0.720086 0.112636 0.566257
+-0.431166 0.693371 0.0753593 0.572411
+-0.475591 0.663686 0.0377605 0.576114
+-0.51798 0.63116 1.96429e-08 0.57735
+-0.558151 0.595931 -0.0377606 0.576114
+-0.595931 0.558151 -0.0753593 0.572411
+-0.63116 0.51798 -0.112635 0.566257
+-0.663686 0.475591 -0.149429 0.557678
+-0.693371 0.431166 -0.185583 0.54671
+-0.720086 0.384894 -0.220942 0.533402
+-0.743717 0.336974 -0.255355 0.51781
+-0.764164 0.287611 -0.288675 0.5
+-0.781339 0.237016 -0.320759 0.480049
+-0.795167 0.185407 -0.351469 0.458043
+-0.805591 0.133004 -0.380673 0.434075
+-0.812565 0.0800306 -0.408248 0.408248
+-0.816059 0.0267151 -0.434075 0.380674
+0.841175 0.0275372 0.458622 -0.285189
+0.837573 0.0824937 0.476292 -0.254583
+0.830384 0.137097 0.491923 -0.222887
+0.81964 0.191113 0.505447 -0.190237
+0.805385 0.244311 0.516807 -0.156772
+0.787682 0.296463 0.525954 -0.122635
+0.766606 0.347345 0.532848 -0.0879736
+0.742247 0.396739 0.537461 -0.0529353
+0.71471 0.444435 0.539773 -0.0176703
+0.684112 0.490228 0.539773 0.0176703
+0.650585 0.533921 0.537461 0.0529353
+0.614272 0.575329 0.532848 0.0879736
+0.575329 0.614272 0.525954 0.122635
+0.533922 0.650585 0.516807 0.156772
+0.490228 0.684112 0.505447 0.190237
+0.444435 0.71471 0.491923 0.222887
+0.396739 0.742247 0.476292 0.254583
+0.347345 0.766606 0.458622 0.285189
+0.296463 0.787682 0.438987 0.314574
+0.244311 0.805385 0.417473 0.342612
+0.191113 0.81964 0.394172 0.369182
+0.137097 0.830384 0.369182 0.394172
+0.0824936 0.837573 0.342611 0.417473
+0.0275371 0.841175 0.314574 0.438987
+-0.0275373 0.841175 0.285189 0.458622
+-0.0824938 0.837573 0.254583 0.476292
+-0.137097 0.830384 0.222887 0.491923
+-0.191113 0.81964 0.190237 0.505447
+-0.244311 0.805385 0.156772 0.516807
+-0.296463 0.787682 0.122635 0.525954
+-0.347345 0.766606 0.0879735 0.532848
+-0.396739 0.742247 0.0529354 0.537461
+-0.444435 0.71471 0.0176703 0.539773
+-0.490228 0.684112 -0.0176704 0.539773
+-0.533921 0.650585 -0.0529353 0.537461
+-0.575329 0.614272 -0.0879736 0.532848
+-0.614272 0.575329 -0.122635 0.525954
+-0.650585 0.533921 -0.156772 0.516807
+-0.684112 0.490228 -0.190237 0.505447
+-0.71471 0.444435 -0.222887 0.491923
+-0.742247 0.39674 -0.254583 0.476292
+-0.766606 0.347345 -0.285189 0.458622
+-0.787682 0.296463 -0.314574 0.438987
+-0.805385 0.244311 -0.342612 0.417473
+-0.81964 0.191113 -0.369182 0.394172
+-0.830384 0.137097 -0.394172 0.369182
+-0.837573 0.0824937 -0.417473 0.342612
+-0.841175 0.0275373 -0.438987 0.314574
+0.790146 0.0258667 0.356693 -0.497765
+0.786763 0.0774894 0.388485 -0.47337
+0.78001 0.12878 0.418613 -0.446949
+0.769917 0.17952 0.446949 -0.418613
+0.756528 0.22949 0.47337 -0.388485
+0.739899 0.278478 0.497765 -0.356693
+0.720101 0.326274 0.520028 -0.323374
+0.69722 0.372672 0.540064 -0.28867
+0.671353 0.417474 0.557788 -0.25273
+0.642612 0.460489 0.573123 -0.215708
+0.611118 0.501532 0.586004 -0.177762
+0.577008 0.540427 0.596375 -0.139055
+0.540427 0.577008 0.604193 -0.0997527
+0.501532 0.611118 0.609424 -0.060023
+0.460489 0.642612 0.612045 -0.0200362
+0.417474 0.671353 0.612045 0.0200363
+0.372672 0.69722 0.609424 0.060023
+0.326274 0.720101 0.604193 0.0997527
+0.278478 0.739899 0.596375 0.139055
+0.22949 0.756528 0.586004 0.177762
+0.17952 0.769917 0.573123 0.215708
+0.12878 0.78001 0.557788 0.25273
+0.0774893 0.786763 0.540064 0.28867
+0.0258666 0.790146 0.520028 0.323374
+-0.0258668 0.790146 0.497765 0.356693
+-0.0774894 0.786763 0.47337 0.388485
+-0.12878 0.78001 0.446949 0.418613
+-0.17952 0.769917 0.418613 0.446949
+-0.22949 0.756528 0.388485 0.47337
+-0.278478 0.739899 0.356693 0.497765
+-0.326274 0.720101 0.323374 0.520028
+-0.372672 0.69722 0.28867 0.540064
+-0.417474 0.671353 0.25273 0.557788
+-0.460489 0.642612 0.215708 0.573123
+-0.501532 0.611118 0.177762 0.586004
+-0.540427 0.577008 0.139055 0.596375
+-0.577008 0.540427 0.0997527 0.604193
+-0.611118 0.501532 0.060023 0.609424
+-0.642612 0.460489 0.0200363 0.612045
+-0.671353 0.417474 -0.0200363 0.612045
+-0.69722 0.372672 -0.0600229 0.609424
+-0.720101 0.326274 -0.0997527 0.604193
+-0.739899 0.278478 -0.139055 0.596375
+-0.756528 0.22949 -0.177762 0.586004
+-0.769917 0.179519 -0.215708 0.573123
+-0.78001 0.12878 -0.25273 0.557788
+-0.786763 0.0774893 -0.28867 0.540064
+-0.790146 0.0258668 -0.323374 0.520028
+0.816059 0.026715 0.380673 -0.434075
+0.812565 0.0800307 0.408248 -0.408248
+0.805591 0.133004 0.434075 -0.380673
+0.795167 0.185407 0.458043 -0.351469
+0.781339 0.237016 0.480049 -0.320759
+0.764164 0.287611 0.5 -0.288675
+0.743717 0.336974 0.51781 -0.255355
+0.720086 0.384894 0.533402 -0.220942
+0.693371 0.431166 0.54671 -0.185583
+0.663687 0.475591 0.557678 -0.149429
+0.63116 0.51798 0.566257 -0.112635
+0.595932 0.558151 0.572411 -0.0753593
+0.558151 0.595932 0.576114 -0.0377605
+0.51798 0.63116 0.57735 1.29091e-09
+0.475591 0.663687 0.576114 0.0377605
+0.431166 0.693371 0.572411 0.0753594
+0.384894 0.720086 0.566257 0.112635
+0.336974 0.743717 0.557678 0.149429
+0.287611 0.764164 0.54671 0.185583
+0.237016 0.781339 0.533402 0.220942
+0.185407 0.795167 0.51781 0.255356
+0.133003 0.805591 0.5 0.288675
+0.0800306 0.812565 0.480049 0.320759
+0.0267149 0.816059 0.458043 0.351469
+-0.0267151 0.816059 0.434075 0.380674
+-0.0800307 0.812565 0.408248 0.408248
+-0.133004 0.805591 0.380673 0.434075
+-0.185407 0.795167 0.351469 0.458043
+-0.237017 0.781338 0.320759 0.480049
+-0.287611 0.764164 0.288675 0.5
+-0.336974 0.743717 0.255355 0.51781
+-0.384894 0.720086 0.220942 0.533402
+-0.431166 0.693371 0.185583 0.54671
+-0.475591 0.663686 0.149429 0.557678
+-0.51798 0.63116 0.112635 0.566257
+-0.558151 0.595931 0.0753593 0.572411
+-0.595931 0.558151 0.0377605 0.576114
+-0.63116 0.51798 -2.65277e-08 0.57735
+-0.663686 0.475591 -0.0377605 0.576114
+-0.693371 0.431166 -0.0753593 0.572411
+-0.720086 0.384894 -0.112635 0.566257
+-0.743717 0.336974 -0.149429 0.557678
+-0.764164 0.287611 -0.185583 0.54671
+-0.781339 0.237016 -0.220942 0.533402
+-0.795167 0.185407 -0.255356 0.51781
+-0.805591 0.133004 -0.288675 0.5
+-0.812565 0.0800306 -0.320759 0.480049
+-0.816059 0.0267151 -0.351469 0.458043
+0.816059 0.026715 0.288675 -0.5
+0.812565 0.0800307 0.320759 -0.480049
+0.805591 0.133004 0.351469 -0.458043
+0.795167 0.185407 0.380673 -0.434075
+0.781339 0.237016 0.408248 -0.408248
+0.764164 0.287611 0.434075 -0.380673
+0.743717 0.336974 0.458043 -0.351469
+0.720086 0.384894 0.480049 -0.320759
+0.693371 0.431166 0.5 -0.288675
+0.663687 0.475591 0.51781 -0.255355
+0.63116 0.51798 0.533402 -0.220942
+0.595932 0.558151 0.54671 -0.185583
+0.558151 0.595932 0.557678 -0.149429
+0.51798 0.63116 0.566257 -0.112635
+0.475591 0.663687 0.572411 -0.0753593
+0.431166 0.693371 0.576114 -0.0377605
+0.384894 0.720086 0.57735 4.74615e-08
+0.336974 0.743717 0.576114 0.0377606
+0.287611 0.764164 0.572411 0.0753594
+0.237016 0.781339 0.566257 0.112635
+0.185407 0.795167 0.557678 0.149429
+0.133003 0.805591 0.54671 0.185583
+0.0800306 0.812565 0.533402 0.220942
+0.0267149 0.816059 0.51781 0.255356
+-0.0267151 0.816059 0.5 0.288675
+-0.0800307 0.812565 0.480049 0.320759
+-0.133004 0.805591 0.458043 0.351469
+-0.185407 0.795167 0.434075 0.380674
+-0.237017 0.781338 0.408248 0.408248
+-0.287611 0.764164 0.380673 0.434075
+-0.336974 0.743717 0.351469 0.458043
+-0.384894 0.720086 0.320759 0.480049
+-0.431166 0.693371 0.288675 0.5
+-0.475591 0.663686 0.255355 0.51781
+-0.51798 0.63116 0.220942 0.533402
+-0.558151 0.595931 0.185583 0.54671
+-0.595931 0.558151 0.149429 0.557678
+-0.63116 0.51798 0.112635 0.566257
+-0.663686 0.475591 0.0753594 0.572411
+-0.693371 0.431166 0.0377605 0.576114
+-0.720086 0.384894 6.49528e-08 0.57735
+-0.743717 0.336974 -0.0377605 0.576114
+-0.764164 0.287611 -0.0753594 0.572411
+-0.781339 0.237016 -0.112635 0.566257
+-0.795167 0.185407 -0.149429 0.557678
+-0.805591 0.133004 -0.185583 0.54671
+-0.812565 0.0800306 -0.220942 0.533402
+-0.816059 0.0267151 -0.255355 0.51781
+0.841175 0.0275372 0.314574 -0.438987
+0.837573 0.0824937 0.342612 -0.417473
+0.830384 0.137097 0.369182 -0.394172
+0.81964 0.191113 0.394172 -0.369182
+0.805385 0.244311 0.417473 -0.342612
+0.787682 0.296463 0.438987 -0.314574
+0.766606 0.347345 0.458622 -0.285189
+0.742247 0.396739 0.476292 -0.254583
+0.71471 0.444435 0.491923 -0.222887
+0.684112 0.490228 0.505447 -0.190237
+0.650585 0.533921 0.516807 -0.156772
+0.614272 0.575329 0.525954 -0.122635
+0.575329 0.614272 0.532848 -0.0879736
+0.533922 0.650585 0.537461 -0.0529353
+0.490228 0.684112 0.539773 -0.0176703
+0.444435 0.71471 0.539773 0.0176704
+0.396739 0.742247 0.537461 0.0529354
+0.347345 0.766606 0.532848 0.0879736
+0.296463 0.787682 0.525954 0.122635
+0.244311 0.805385 0.516807 0.156772
+0.191113 0.81964 0.505447 0.190237
+0.137097 0.830384 0.491923 0.222887
+0.0824936 0.837573 0.476292 0.254583
+0.0275371 0.841175 0.458622 0.285189
+-0.0275373 0.841175 0.438987 0.314574
+-0.0824938 0.837573 0.417473 0.342612
+-0.137097 0.830384 0.394172 0.369182
+-0.191113 0.81964 0.369182 0.394172
+-0.244311 0.805385 0.342611 0.417473
+-0.296463 0.787682 0.314574 0.438987
+-0.347345 0.766606 0.285189 0.458622
+-0.396739 0.742247 0.254583 0.476292
+-0.444435 0.71471 0.222887 0.491923
+-0.490228 0.684112 0.190237 0.505447
+-0.533921 0.650585 0.156772 0.516807
+-0.575329 0.614272 0.122635 0.525954
+-0.614272 0.575329 0.0879736 0.532848
+-0.650585 0.533921 0.0529353 0.537461
+-0.684112 0.490228 0.0176704 0.539773
+-0.71471 0.444435 -0.0176703 0.539773
+-0.742247 0.39674 -0.0529352 0.537461
+-0.766606 0.347345 -0.0879736 0.532848
+-0.787682 0.296463 -0.122635 0.525954
+-0.805385 0.244311 -0.156772 0.516807
+-0.81964 0.191113 -0.190237 0.505447
+-0.830384 0.137097 -0.222887 0.491923
+-0.837573 0.0824937 -0.254583 0.476292
+-0.841175 0.0275373 -0.285189 0.458622
+0.841175 0.0275372 0.394172 -0.369182
+0.837573 0.0824937 0.417473 -0.342612
+0.830384 0.137097 0.438987 -0.314574
+0.81964 0.191113 0.458622 -0.285189
+0.805385 0.244311 0.476292 -0.254583
+0.787682 0.296463 0.491923 -0.222887
+0.766606 0.347345 0.505447 -0.190237
+0.742247 0.396739 0.516807 -0.156772
+0.71471 0.444435 0.525954 -0.122635
+0.684112 0.490228 0.532848 -0.0879736
+0.650585 0.533921 0.537461 -0.0529353
+0.614272 0.575329 0.539773 -0.0176703
+0.575329 0.614272 0.539773 0.0176703
+0.533922 0.650585 0.537461 0.0529353
+0.490228 0.684112 0.532848 0.0879736
+0.444435 0.71471 0.525954 0.122635
+0.396739 0.742247 0.516807 0.156772
+0.347345 0.766606 0.505447 0.190237
+0.296463 0.787682 0.491923 0.222887
+0.244311 0.805385 0.476292 0.254583
+0.191113 0.81964 0.458622 0.285189
+0.137097 0.830384 0.438987 0.314574
+0.0824936 0.837573 0.417473 0.342612
+0.0275371 0.841175 0.394172 0.369182
+-0.0275373 0.841175 0.369182 0.394172
+-0.0824938 0.837573 0.342611 0.417473
+-0.137097 0.830384 0.314574 0.438987
+-0.191113 0.81964 0.285189 0.458622
+-0.244311 0.805385 0.254583 0.476292
+-0.296463 0.787682 0.222887 0.491923
+-0.347345 0.766606 0.190237 0.505447
+-0.396739 0.742247 0.156772 0.516807
+-0.444435 0.71471 0.122635 0.525954
+-0.490228 0.684112 0.0879736 0.532848
+-0.533921 0.650585 0.0529353 0.537461
+-0.575329 0.614272 0.0176703 0.539773
+-0.614272 0.575329 -0.0176703 0.539773
+-0.650585 0.533921 -0.0529353 0.537461
+-0.684112 0.490228 -0.0879736 0.532848
+-0.71471 0.444435 -0.122635 0.525954
+-0.742247 0.39674 -0.156772 0.516807
+-0.766606 0.347345 -0.190237 0.505447
+-0.787682 0.296463 -0.222887 0.491923
+-0.805385 0.244311 -0.254583 0.476292
+-0.81964 0.191113 -0.285189 0.458622
+-0.830384 0.137097 -0.314574 0.438987
+-0.837573 0.0824937 -0.342612 0.417473
+-0.841175 0.0275373 -0.369182 0.394172
+0.865562 0.0283356 0.396677 -0.304381
+0.861855 0.0848853 0.415735 -0.277785
+0.854458 0.141072 0.433013 -0.25
+0.843402 0.196654 0.448436 -0.221144
+0.828735 0.251394 0.46194 -0.191342
+0.810518 0.305057 0.473465 -0.16072
+0.788831 0.357415 0.482963 -0.12941
+0.763766 0.408242 0.490393 -0.0975452
+0.735431 0.45732 0.495722 -0.0652631
+0.703946 0.50444 0.498929 -0.0327016
+0.669447 0.549401 0.5 -9.06459e-09
+0.632081 0.592008 0.498929 0.0327016
+0.592008 0.632081 0.495722 0.0652631
+0.549401 0.669447 0.490393 0.0975452
+0.50444 0.703946 0.482963 0.12941
+0.45732 0.735431 0.473465 0.16072
+0.408241 0.763766 0.46194 0.191342
+0.357415 0.788831 0.448436 0.221144
+0.305057 0.810518 0.433013 0.25
+0.251394 0.828735 0.415735 0.277785
+0.196654 0.843402 0.396677 0.304381
+0.141072 0.854458 0.37592 0.329673
+0.0848852 0.861855 0.353553 0.353553
+0.0283355 0.865562 0.329673 0.37592
+-0.0283356 0.865562 0.304381 0.396677
+-0.0848854 0.861855 0.277785 0.415735
+-0.141072 0.854458 0.25 0.433013
+-0.196654 0.843402 0.221144 0.448436
+-0.251394 0.828735 0.191342 0.46194
+-0.305058 0.810518 0.16072 0.473465
+-0.357415 0.788831 0.129409 0.482963
+-0.408241 0.763766 0.0975452 0.490393
+-0.45732 0.735431 0.0652631 0.495722
+-0.504441 0.703946 0.0327015 0.498929
+-0.549401 0.669447 1.70112e-08 0.5
+-0.592008 0.632081 -0.0327016 0.498929
+-0.632081 0.592008 -0.0652631 0.495722
+-0.669447 0.549401 -0.0975452 0.490393
+-0.703946 0.504441 -0.129409 0.482963
+-0.735431 0.45732 -0.16072 0.473465
+-0.763766 0.408242 -0.191342 0.46194
+-0.788831 0.357415 -0.221144 0.448436
+-0.810518 0.305057 -0.25 0.433013
+-0.828735 0.251394 -0.277785 0.415735
+-0.843402 0.196654 -0.304381 0.396677
+-0.854458 0.141072 -0.329673 0.37592
+-0.861855 0.0848853 -0.353553 0.353553
+-0.865562 0.0283356 -0.37592 0.329673
+0.865562 0.0283356 0.329673 -0.37592
+0.861855 0.0848853 0.353553 -0.353553
+0.854458 0.141072 0.37592 -0.329673
+0.843402 0.196654 0.396677 -0.304381
+0.828735 0.251394 0.415735 -0.277785
+0.810518 0.305057 0.433013 -0.25
+0.788831 0.357415 0.448436 -0.221144
+0.763766 0.408242 0.46194 -0.191342
+0.735431 0.45732 0.473465 -0.16072
+0.703946 0.50444 0.482963 -0.12941
+0.669447 0.549401 0.490393 -0.0975452
+0.632081 0.592008 0.495722 -0.0652631
+0.592008 0.632081 0.498929 -0.0327016
+0.549401 0.669447 0.5 1.11796e-09
+0.50444 0.703946 0.498929 0.0327016
+0.45732 0.735431 0.495722 0.0652631
+0.408241 0.763766 0.490393 0.0975452
+0.357415 0.788831 0.482963 0.12941
+0.305057 0.810518 0.473465 0.16072
+0.251394 0.828735 0.46194 0.191342
+0.196654 0.843402 0.448436 0.221144
+0.141072 0.854458 0.433013 0.25
+0.0848852 0.861855 0.415735 0.277785
+0.0283355 0.865562 0.396677 0.304381
+-0.0283356 0.865562 0.37592 0.329673
+-0.0848854 0.861855 0.353553 0.353553
+-0.141072 0.854458 0.329673 0.37592
+-0.196654 0.843402 0.304381 0.396677
+-0.251394 0.828735 0.277785 0.415735
+-0.305058 0.810518 0.25 0.433013
+-0.357415 0.788831 0.221144 0.448436
+-0.408241 0.763766 0.191342 0.46194
+-0.45732 0.735431 0.16072 0.473465
+-0.504441 0.703946 0.129409 0.482963
+-0.549401 0.669447 0.0975452 0.490393
+-0.592008 0.632081 0.0652631 0.495722
+-0.632081 0.592008 0.0327016 0.498929
+-0.669447 0.549401 -2.29737e-08 0.5
+-0.703946 0.504441 -0.0327015 0.498929
+-0.735431 0.45732 -0.0652631 0.495722
+-0.763766 0.408242 -0.0975451 0.490393
+-0.788831 0.357415 -0.12941 0.482963
+-0.810518 0.305057 -0.16072 0.473465
+-0.828735 0.251394 -0.191342 0.46194
+-0.843402 0.196654 -0.221144 0.448436
+-0.854458 0.141072 -0.25 0.433013
+-0.861855 0.0848853 -0.277785 0.415735
+-0.865562 0.0283356 -0.304381 0.396677
+0.88928 0.029112 0.333136 -0.312016
+0.885472 0.0872114 0.352829 -0.28956
+0.877872 0.144937 0.371012 -0.265863
+0.866513 0.202043 0.387606 -0.241029
+0.851444 0.258283 0.40254 -0.215162
+0.832728 0.313417 0.415751 -0.188374
+0.810447 0.367209 0.427181 -0.160779
+0.784695 0.419428 0.436782 -0.132496
+0.755583 0.469852 0.444512 -0.103646
+0.723236 0.518263 0.450339 -0.0743513
+0.687791 0.564456 0.454238 -0.0447385
+0.649401 0.608231 0.456191 -0.0149341
+0.608231 0.649401 0.456191 0.0149342
+0.564456 0.687791 0.454238 0.0447385
+0.518263 0.723236 0.450339 0.0743513
+0.469852 0.755583 0.444512 0.103646
+0.419428 0.784695 0.436781 0.132496
+0.367209 0.810447 0.427181 0.160779
+0.313417 0.832728 0.415751 0.188374
+0.258283 0.851444 0.40254 0.215162
+0.202043 0.866513 0.387606 0.241029
+0.144937 0.877872 0.371012 0.265864
+0.0872113 0.885472 0.352829 0.28956
+0.0291119 0.88928 0.333136 0.312016
+-0.0291121 0.88928 0.312016 0.333136
+-0.0872115 0.885472 0.28956 0.352829
+-0.144937 0.877872 0.265863 0.371012
+-0.202043 0.866513 0.241029 0.387606
+-0.258283 0.851444 0.215162 0.40254
+-0.313417 0.832728 0.188374 0.415751
+-0.367209 0.810447 0.160779 0.427181
+-0.419428 0.784695 0.132496 0.436781
+-0.469852 0.755583 0.103646 0.444512
+-0.518263 0.723236 0.0743512 0.450339
+-0.564456 0.687791 0.0447385 0.454238
+-0.608231 0.649401 0.0149341 0.456191
+-0.649401 0.608231 -0.0149341 0.456191
+-0.687791 0.564456 -0.0447385 0.454238
+-0.723236 0.518263 -0.0743512 0.450339
+-0.755583 0.469852 -0.103646 0.444512
+-0.784695 0.419428 -0.132496 0.436782
+-0.810447 0.367209 -0.160779 0.427181
+-0.832728 0.313417 -0.188374 0.415751
+-0.851444 0.258283 -0.215162 0.40254
+-0.866513 0.202043 -0.241029 0.387606
+-0.877872 0.144937 -0.265863 0.371012
+-0.885472 0.0872113 -0.28956 0.352829
+-0.88928 0.0291121 -0.312016 0.333136
+0.841175 0.0275372 0.505447 -0.190237
+0.837573 0.0824937 0.516807 -0.156772
+0.830384 0.137097 0.525954 -0.122635
+0.81964 0.191113 0.532848 -0.0879736
+0.805385 0.244311 0.537461 -0.0529353
+0.787682 0.296463 0.539773 -0.0176703
+0.766606 0.347345 0.539773 0.0176703
+0.742247 0.396739 0.537461 0.0529353
+0.71471 0.444435 0.532848 0.0879736
+0.684112 0.490228 0.525954 0.122635
+0.650585 0.533921 0.516807 0.156772
+0.614272 0.575329 0.505447 0.190237
+0.575329 0.614272 0.491923 0.222887
+0.533922 0.650585 0.476292 0.254583
+0.490228 0.684112 0.458622 0.285189
+0.444435 0.71471 0.438987 0.314574
+0.396739 0.742247 0.417473 0.342612
+0.347345 0.766606 0.394172 0.369182
+0.296463 0.787682 0.369182 0.394172
+0.244311 0.805385 0.342612 0.417473
+0.191113 0.81964 0.314574 0.438987
+0.137097 0.830384 0.285189 0.458622
+0.0824936 0.837573 0.254583 0.476292
+0.0275371 0.841175 0.222887 0.491923
+-0.0275373 0.841175 0.190237 0.505447
+-0.0824938 0.837573 0.156772 0.516807
+-0.137097 0.830384 0.122635 0.525954
+-0.191113 0.81964 0.0879736 0.532848
+-0.244311 0.805385 0.0529352 0.537461
+-0.296463 0.787682 0.0176702 0.539773
+-0.347345 0.766606 -0.0176704 0.539773
+-0.396739 0.742247 -0.0529352 0.537461
+-0.444435 0.71471 -0.0879736 0.532848
+-0.490228 0.684112 -0.122635 0.525954
+-0.533921 0.650585 -0.156772 0.516807
+-0.575329 0.614272 -0.190237 0.505447
+-0.614272 0.575329 -0.222887 0.491923
+-0.650585 0.533921 -0.254583 0.476292
+-0.684112 0.490228 -0.285189 0.458622
+-0.71471 0.444435 -0.314574 0.438987
+-0.742247 0.39674 -0.342611 0.417473
+-0.766606 0.347345 -0.369182 0.394172
+-0.787682 0.296463 -0.394172 0.369182
+-0.805385 0.244311 -0.417473 0.342612
+-0.81964 0.191113 -0.438987 0.314574
+-0.830384 0.137097 -0.458622 0.285189
+-0.837573 0.0824937 -0.476292 0.254583
+-0.841175 0.0275373 -0.491923 0.222887
+0.865562 0.0283356 0.482963 -0.12941
+0.861855 0.0848853 0.490393 -0.0975452
+0.854458 0.141072 0.495722 -0.0652631
+0.843402 0.196654 0.498929 -0.0327016
+0.828735 0.251394 0.5 3.72652e-10
+0.810518 0.305057 0.498929 0.0327016
+0.788831 0.357415 0.495722 0.0652631
+0.763766 0.408242 0.490393 0.0975452
+0.735431 0.45732 0.482963 0.12941
+0.703946 0.50444 0.473465 0.16072
+0.669447 0.549401 0.46194 0.191342
+0.632081 0.592008 0.448436 0.221144
+0.592008 0.632081 0.433013 0.25
+0.549401 0.669447 0.415735 0.277785
+0.50444 0.703946 0.396677 0.304381
+0.45732 0.735431 0.37592 0.329673
+0.408241 0.763766 0.353553 0.353553
+0.357415 0.788831 0.329673 0.37592
+0.305057 0.810518 0.304381 0.396677
+0.251394 0.828735 0.277785 0.415735
+0.196654 0.843402 0.25 0.433013
+0.141072 0.854458 0.221144 0.448436
+0.0848852 0.861855 0.191342 0.46194
+0.0283355 0.865562 0.16072 0.473465
+-0.0283356 0.865562 0.129409 0.482963
+-0.0848854 0.861855 0.0975451 0.490393
+-0.141072 0.854458 0.0652631 0.495722
+-0.196654 0.843402 0.0327015 0.498929
+-0.251394 0.828735 -8.1833e-08 0.5
+-0.305058 0.810518 -0.0327016 0.498929
+-0.357415 0.788831 -0.0652632 0.495722
+-0.408241 0.763766 -0.0975451 0.490393
+-0.45732 0.735431 -0.12941 0.482963
+-0.504441 0.703946 -0.16072 0.473465
+-0.549401 0.669447 -0.191342 0.46194
+-0.592008 0.632081 -0.221144 0.448436
+-0.632081 0.592008 -0.25 0.433013
+-0.669447 0.549401 -0.277785 0.415735
+-0.703946 0.504441 -0.304381 0.396677
+-0.735431 0.45732 -0.329673 0.37592
+-0.763766 0.408242 -0.353553 0.353553
+-0.788831 0.357415 -0.37592 0.329673
+-0.810518 0.305057 -0.396677 0.304381
+-0.828735 0.251394 -0.415735 0.277785
+-0.843402 0.196654 -0.433013 0.25
+-0.854458 0.141072 -0.448436 0.221144
+-0.861855 0.0848853 -0.46194 0.191342
+-0.865562 0.0283356 -0.473465 0.16072
+0.865562 0.0283356 0.448436 -0.221144
+0.861855 0.0848853 0.46194 -0.191342
+0.854458 0.141072 0.473465 -0.16072
+0.843402 0.196654 0.482963 -0.12941
+0.828735 0.251394 0.490393 -0.0975452
+0.810518 0.305057 0.495722 -0.0652631
+0.788831 0.357415 0.498929 -0.0327016
+0.763766 0.408242 0.5 1.05552e-08
+0.735431 0.45732 0.498929 0.0327016
+0.703946 0.50444 0.495722 0.0652631
+0.669447 0.549401 0.490393 0.0975452
+0.632081 0.592008 0.482963 0.12941
+0.592008 0.632081 0.473465 0.16072
+0.549401 0.669447 0.46194 0.191342
+0.50444 0.703946 0.448436 0.221144
+0.45732 0.735431 0.433013 0.25
+0.408241 0.763766 0.415735 0.277785
+0.357415 0.788831 0.396677 0.304381
+0.305057 0.810518 0.37592 0.329673
+0.251394 0.828735 0.353553 0.353553
+0.196654 0.843402 0.329673 0.37592
+0.141072 0.854458 0.304381 0.396677
+0.0848852 0.861855 0.277785 0.415735
+0.0283355 0.865562 0.25 0.433013
+-0.0283356 0.865562 0.221144 0.448436
+-0.0848854 0.861855 0.191342 0.46194
+-0.141072 0.854458 0.16072 0.473465
+-0.196654 0.843402 0.129409 0.482963
+-0.251394 0.828735 0.0975451 0.490393
+-0.305058 0.810518 0.065263 0.495722
+-0.357415 0.788831 0.0327015 0.498929
+-0.408241 0.763766 5.69961e-08 0.5
+-0.45732 0.735431 -0.0327016 0.498929
+-0.504441 0.703946 -0.0652631 0.495722
+-0.549401 0.669447 -0.0975451 0.490393
+-0.592008 0.632081 -0.12941 0.482963
+-0.632081 0.592008 -0.16072 0.473465
+-0.669447 0.549401 -0.191342 0.46194
+-0.703946 0.504441 -0.221144 0.448436
+-0.735431 0.45732 -0.25 0.433013
+-0.763766 0.408242 -0.277785 0.415735
+-0.788831 0.357415 -0.304381 0.396677
+-0.810518 0.305057 -0.329673 0.37592
+-0.828735 0.251394 -0.353553 0.353553
+-0.843402 0.196654 -0.37592 0.329673
+-0.854458 0.141072 -0.396677 0.304381
+-0.861855 0.0848853 -0.415735 0.277785
+-0.865562 0.0283356 -0.433013 0.25
+0.88928 0.029112 0.427181 -0.160779
+0.885472 0.0872114 0.436782 -0.132496
+0.877872 0.144937 0.444512 -0.103646
+0.866513 0.202043 0.450339 -0.0743513
+0.851444 0.258283 0.454238 -0.0447385
+0.832728 0.313417 0.456191 -0.0149342
+0.810447 0.367209 0.456191 0.0149342
+0.784695 0.419428 0.454238 0.0447385
+0.755583 0.469852 0.450339 0.0743513
+0.723236 0.518263 0.444512 0.103646
+0.687791 0.564456 0.436782 0.132496
+0.649401 0.608231 0.427181 0.160779
+0.608231 0.649401 0.415751 0.188374
+0.564456 0.687791 0.40254 0.215162
+0.518263 0.723236 0.387606 0.241029
+0.469852 0.755583 0.371012 0.265864
+0.419428 0.784695 0.352829 0.28956
+0.367209 0.810447 0.333136 0.312016
+0.313417 0.832728 0.312016 0.333136
+0.258283 0.851444 0.28956 0.352829
+0.202043 0.866513 0.265863 0.371012
+0.144937 0.877872 0.241029 0.387606
+0.0872113 0.885472 0.215162 0.40254
+0.0291119 0.88928 0.188374 0.415751
+-0.0291121 0.88928 0.160779 0.427181
+-0.0872115 0.885472 0.132496 0.436782
+-0.144937 0.877872 0.103646 0.444512
+-0.202043 0.866513 0.0743512 0.450339
+-0.258283 0.851444 0.0447384 0.454238
+-0.313417 0.832728 0.0149341 0.456191
+-0.367209 0.810447 -0.0149342 0.456191
+-0.419428 0.784695 -0.0447384 0.454238
+-0.469852 0.755583 -0.0743513 0.450339
+-0.518263 0.723236 -0.103646 0.444512
+-0.564456 0.687791 -0.132496 0.436782
+-0.608231 0.649401 -0.160779 0.427181
+-0.649401 0.608231 -0.188374 0.415751
+-0.687791 0.564456 -0.215162 0.40254
+-0.723236 0.518263 -0.241029 0.387606
+-0.755583 0.469852 -0.265863 0.371012
+-0.784695 0.419428 -0.28956 0.352829
+-0.810447 0.367209 -0.312016 0.333136
+-0.832728 0.313417 -0.333136 0.312016
+-0.851444 0.258283 -0.352829 0.28956
+-0.866513 0.202043 -0.371012 0.265863
+-0.877872 0.144937 -0.387606 0.241029
+-0.885472 0.0872113 -0.40254 0.215162
+-0.88928 0.0291121 -0.415751 0.188374
+0.88928 0.029112 0.450339 -0.0743513
+0.885472 0.0872114 0.454238 -0.0447385
+0.877872 0.144937 0.456191 -0.0149342
+0.866513 0.202043 0.456191 0.0149342
+0.851444 0.258283 0.454238 0.0447385
+0.832728 0.313417 0.450339 0.0743513
+0.810447 0.367209 0.444512 0.103646
+0.784695 0.419428 0.436782 0.132496
+0.755583 0.469852 0.427181 0.160779
+0.723236 0.518263 0.415751 0.188374
+0.687791 0.564456 0.40254 0.215162
+0.649401 0.608231 0.387606 0.241029
+0.608231 0.649401 0.371012 0.265863
+0.564456 0.687791 0.352829 0.28956
+0.518263 0.723236 0.333136 0.312016
+0.469852 0.755583 0.312016 0.333136
+0.419428 0.784695 0.28956 0.352829
+0.367209 0.810447 0.265863 0.371012
+0.313417 0.832728 0.241029 0.387606
+0.258283 0.851444 0.215162 0.40254
+0.202043 0.866513 0.188374 0.415751
+0.144937 0.877872 0.160779 0.427181
+0.0872113 0.885472 0.132496 0.436782
+0.0291119 0.88928 0.103646 0.444512
+-0.0291121 0.88928 0.0743512 0.450339
+-0.0872115 0.885472 0.0447385 0.454238
+-0.144937 0.877872 0.0149341 0.456191
+-0.202043 0.866513 -0.0149342 0.456191
+-0.258283 0.851444 -0.0447386 0.454238
+-0.313417 0.832728 -0.0743513 0.450339
+-0.367209 0.810447 -0.103646 0.444512
+-0.419428 0.784695 -0.132496 0.436782
+-0.469852 0.755583 -0.160779 0.427181
+-0.518263 0.723236 -0.188374 0.415751
+-0.564456 0.687791 -0.215162 0.40254
+-0.608231 0.649401 -0.241029 0.387606
+-0.649401 0.608231 -0.265863 0.371012
+-0.687791 0.564456 -0.28956 0.352829
+-0.723236 0.518263 -0.312016 0.333136
+-0.755583 0.469852 -0.333136 0.312016
+-0.784695 0.419428 -0.352829 0.28956
+-0.810447 0.367209 -0.371012 0.265863
+-0.832728 0.313417 -0.387606 0.241029
+-0.851444 0.258283 -0.40254 0.215162
+-0.866513 0.202043 -0.415751 0.188374
+-0.877872 0.144937 -0.427181 0.160779
+-0.885472 0.0872113 -0.436782 0.132496
+-0.88928 0.0291121 -0.444512 0.103646
+0.912382 0.0298683 0.407374 -0.0267007
+0.908475 0.089477 0.408248 1.11532e-09
+0.900678 0.148703 0.407374 0.0267007
+0.889024 0.207291 0.404756 0.0532871
+0.873563 0.264992 0.400404 0.0796453
+0.854361 0.321559 0.394338 0.105662
+0.831501 0.376748 0.386583 0.131227
+0.80508 0.430324 0.377172 0.15623
+0.775212 0.482058 0.366147 0.180564
+0.742024 0.531727 0.353553 0.204124
+0.705659 0.579119 0.339446 0.226811
+0.666272 0.624032 0.323885 0.248526
+0.624032 0.666272 0.306937 0.269177
+0.579119 0.705659 0.288675 0.288675
+0.531727 0.742024 0.269177 0.306937
+0.482058 0.775212 0.248526 0.323885
+0.430324 0.80508 0.226811 0.339446
+0.376748 0.831501 0.204124 0.353553
+0.321559 0.854361 0.180564 0.366147
+0.264992 0.873563 0.15623 0.377172
+0.207291 0.889024 0.131227 0.386583
+0.148702 0.900678 0.105662 0.394338
+0.0894769 0.908475 0.0796452 0.400404
+0.0298682 0.912382 0.0532871 0.404756
+-0.0298684 0.912382 0.0267007 0.407374
+-0.0894771 0.908475 -3.41689e-08 0.408248
+-0.148703 0.900678 -0.0267007 0.407374
+-0.207291 0.889024 -0.0532871 0.404756
+-0.264993 0.873563 -0.0796454 0.400404
+-0.321559 0.854361 -0.105662 0.394338
+-0.376748 0.831501 -0.131227 0.386583
+-0.430324 0.80508 -0.15623 0.377172
+-0.482058 0.775212 -0.180564 0.366147
+-0.531727 0.742024 -0.204124 0.353553
+-0.579119 0.705659 -0.226811 0.339446
+-0.624032 0.666272 -0.248526 0.323885
+-0.666272 0.624032 -0.269177 0.306937
+-0.705659 0.579119 -0.288675 0.288675
+-0.742024 0.531727 -0.306937 0.269177
+-0.775212 0.482058 -0.323885 0.248526
+-0.80508 0.430324 -0.339446 0.226811
+-0.831501 0.376748 -0.353553 0.204124
+-0.854361 0.321559 -0.366147 0.180564
+-0.873563 0.264992 -0.377172 0.15623
+-0.889024 0.207291 -0.386583 0.131227
+-0.900678 0.148703 -0.394338 0.105662
+-0.908475 0.089477 -0.400404 0.0796453
+-0.912382 0.0298684 -0.404756 0.0532871
+0.912382 0.0298683 0.394338 -0.105662
+0.908475 0.089477 0.400404 -0.0796453
+0.900678 0.148703 0.404756 -0.0532871
+0.889024 0.207291 0.407374 -0.0267007
+0.873563 0.264992 0.408248 3.04269e-10
+0.854361 0.321559 0.407374 0.0267007
+0.831501 0.376748 0.404756 0.0532871
+0.80508 0.430324 0.400404 0.0796453
+0.775212 0.482058 0.394338 0.105662
+0.742024 0.531727 0.386583 0.131227
+0.705659 0.579119 0.377172 0.15623
+0.666272 0.624032 0.366147 0.180564
+0.624032 0.666272 0.353553 0.204124
+0.579119 0.705659 0.339446 0.226811
+0.531727 0.742024 0.323885 0.248526
+0.482058 0.775212 0.306937 0.269177
+0.430324 0.80508 0.288675 0.288675
+0.376748 0.831501 0.269177 0.306937
+0.321559 0.854361 0.248526 0.323885
+0.264992 0.873563 0.226811 0.339446
+0.207291 0.889024 0.204124 0.353553
+0.148702 0.900678 0.180564 0.366147
+0.0894769 0.908475 0.15623 0.377172
+0.0298682 0.912382 0.131227 0.386583
+-0.0298684 0.912382 0.105662 0.394338
+-0.0894771 0.908475 0.0796453 0.400404
+-0.148703 0.900678 0.0532871 0.404756
+-0.207291 0.889024 0.0267007 0.407374
+-0.264993 0.873563 -6.68164e-08 0.408248
+-0.321559 0.854361 -0.0267008 0.407374
+-0.376748 0.831501 -0.0532872 0.404756
+-0.430324 0.80508 -0.0796452 0.400404
+-0.482058 0.775212 -0.105662 0.394338
+-0.531727 0.742024 -0.131227 0.386583
+-0.579119 0.705659 -0.15623 0.377172
+-0.624032 0.666272 -0.180564 0.366147
+-0.666272 0.624032 -0.204124 0.353553
+-0.705659 0.579119 -0.226811 0.339446
+-0.742024 0.531727 -0.248526 0.323885
+-0.775212 0.482058 -0.269177 0.306937
+-0.80508 0.430324 -0.288675 0.288675
+-0.831501 0.376748 -0.306937 0.269177
+-0.854361 0.321559 -0.323885 0.248526
+-0.873563 0.264992 -0.339446 0.226811
+-0.889024 0.207291 -0.353553 0.204124
+-0.900678 0.148703 -0.366147 0.180564
+-0.908475 0.089477 -0.377172 0.15623
+-0.912382 0.0298684 -0.386583 0.131227
+0.933521 0.0305603 0.35609 -0.0283599
+0.929524 0.0915501 0.357182 -0.0050098
+0.921546 0.152148 0.356745 0.0183618
+0.909622 0.212094 0.35478 0.0416547
+0.893803 0.271132 0.351296 0.0647692
+0.874156 0.329009 0.346308 0.0876064
+0.850766 0.385477 0.339837 0.110069
+0.823733 0.440295 0.33191 0.132059
+0.793173 0.493227 0.322563 0.153484
+0.759216 0.544047 0.311834 0.174252
+0.722008 0.592537 0.299769 0.194274
+0.681709 0.63849 0.286421 0.213464
+0.63849 0.681709 0.271847 0.23174
+0.592537 0.722008 0.256108 0.249023
+0.544047 0.759216 0.239273 0.265241
+0.493227 0.793173 0.221413 0.280322
+0.440295 0.823733 0.202605 0.294203
+0.385477 0.850766 0.18293 0.306824
+0.329009 0.874156 0.162471 0.318131
+0.271132 0.893803 0.141316 0.328076
+0.212094 0.909622 0.119556 0.336616
+0.152148 0.921546 0.0972846 0.343715
+0.09155 0.929524 0.0745963 0.349342
+0.0305602 0.933521 0.0515885 0.353472
+-0.0305604 0.933521 0.0283599 0.35609
+-0.0915502 0.929524 0.00500977 0.357182
+-0.152148 0.921546 -0.0183618 0.356745
+-0.212094 0.909622 -0.0416547 0.35478
+-0.271132 0.893803 -0.0647693 0.351296
+-0.329009 0.874156 -0.0876065 0.346308
+-0.385477 0.850766 -0.110069 0.339837
+-0.440295 0.823733 -0.132059 0.33191
+-0.493227 0.793173 -0.153484 0.322563
+-0.544047 0.759216 -0.174252 0.311834
+-0.592537 0.722008 -0.194274 0.299769
+-0.63849 0.681709 -0.213464 0.286421
+-0.681709 0.63849 -0.23174 0.271847
+-0.722008 0.592537 -0.249023 0.256108
+-0.759216 0.544047 -0.265241 0.239273
+-0.793173 0.493227 -0.280322 0.221413
+-0.823733 0.440295 -0.294203 0.202605
+-0.850766 0.385477 -0.306824 0.18293
+-0.874156 0.329009 -0.318131 0.162471
+-0.893803 0.271132 -0.328076 0.141316
+-0.909622 0.212094 -0.336616 0.119556
+-0.921546 0.152148 -0.343715 0.0972846
+-0.929524 0.0915501 -0.349342 0.0745963
+-0.933521 0.0305604 -0.353472 0.0515886
+0.88928 0.029112 0.387606 -0.241029
+0.885472 0.0872114 0.40254 -0.215162
+0.877872 0.144937 0.415751 -0.188374
+0.866513 0.202043 0.427181 -0.160779
+0.851444 0.258283 0.436782 -0.132496
+0.832728 0.313417 0.444512 -0.103646
+0.810447 0.367209 0.450339 -0.0743513
+0.784695 0.419428 0.454238 -0.0447385
+0.755583 0.469852 0.456191 -0.0149341
+0.723236 0.518263 0.456191 0.0149341
+0.687791 0.564456 0.454238 0.0447385
+0.649401 0.608231 0.450339 0.0743513
+0.608231 0.649401 0.444512 0.103646
+0.564456 0.687791 0.436782 0.132496
+0.518263 0.723236 0.427181 0.160779
+0.469852 0.755583 0.415751 0.188374
+0.419428 0.784695 0.40254 0.215162
+0.367209 0.810447 0.387606 0.241029
+0.313417 0.832728 0.371012 0.265863
+0.258283 0.851444 0.352829 0.28956
+0.202043 0.866513 0.333136 0.312016
+0.144937 0.877872 0.312016 0.333136
+0.0872113 0.885472 0.28956 0.352829
+0.0291119 0.88928 0.265863 0.371012
+-0.0291121 0.88928 0.241029 0.387606
+-0.0872115 0.885472 0.215162 0.40254
+-0.144937 0.877872 0.188374 0.415751
+-0.202043 0.866513 0.160779 0.427181
+-0.258283 0.851444 0.132496 0.436782
+-0.313417 0.832728 0.103646 0.444512
+-0.367209 0.810447 0.0743512 0.450339
+-0.419428 0.784695 0.0447386 0.454238
+-0.469852 0.755583 0.0149342 0.456191
+-0.518263 0.723236 -0.0149342 0.456191
+-0.564456 0.687791 -0.0447385 0.454238
+-0.608231 0.649401 -0.0743513 0.450339
+-0.649401 0.608231 -0.103646 0.444512
+-0.687791 0.564456 -0.132496 0.436782
+-0.723236 0.518263 -0.160779 0.427181
+-0.755583 0.469852 -0.188374 0.415751
+-0.784695 0.419428 -0.215162 0.40254
+-0.810447 0.367209 -0.241029 0.387606
+-0.832728 0.313417 -0.265864 0.371012
+-0.851444 0.258283 -0.28956 0.352829
+-0.866513 0.202043 -0.312016 0.333136
+-0.877872 0.144937 -0.333136 0.312016
+-0.885472 0.0872113 -0.352829 0.28956
+-0.88928 0.0291121 -0.371012 0.265864
+0.912382 0.0298683 0.366147 -0.180564
+0.908475 0.089477 0.377172 -0.15623
+0.900678 0.148703 0.386583 -0.131227
+0.889024 0.207291 0.394338 -0.105662
+0.873563 0.264992 0.400404 -0.0796453
+0.854361 0.321559 0.404756 -0.0532871
+0.831501 0.376748 0.407374 -0.0267007
+0.80508 0.430324 0.408248 8.61828e-09
+0.775212 0.482058 0.407374 0.0267007
+0.742024 0.531727 0.404756 0.0532871
+0.705659 0.579119 0.400404 0.0796453
+0.666272 0.624032 0.394338 0.105662
+0.624032 0.666272 0.386583 0.131227
+0.579119 0.705659 0.377172 0.15623
+0.531727 0.742024 0.366147 0.180564
+0.482058 0.775212 0.353553 0.204124
+0.430324 0.80508 0.339446 0.226811
+0.376748 0.831501 0.323885 0.248526
+0.321559 0.854361 0.306937 0.269177
+0.264992 0.873563 0.288675 0.288675
+0.207291 0.889024 0.269177 0.306937
+0.148702 0.900678 0.248526 0.323885
+0.0894769 0.908475 0.226811 0.339446
+0.0298682 0.912382 0.204124 0.353553
+-0.0298684 0.912382 0.180564 0.366147
+-0.0894771 0.908475 0.15623 0.377172
+-0.148703 0.900678 0.131227 0.386583
+-0.207291 0.889024 0.105662 0.394338
+-0.264993 0.873563 0.0796452 0.400404
+-0.321559 0.854361 0.053287 0.404756
+-0.376748 0.831501 0.0267007 0.407374
+-0.430324 0.80508 4.65371e-08 0.408248
+-0.482058 0.775212 -0.0267007 0.407374
+-0.531727 0.742024 -0.0532871 0.404756
+-0.579119 0.705659 -0.0796453 0.400404
+-0.624032 0.666272 -0.105662 0.394338
+-0.666272 0.624032 -0.131227 0.386583
+-0.705659 0.579119 -0.15623 0.377172
+-0.742024 0.531727 -0.180564 0.366147
+-0.775212 0.482058 -0.204124 0.353553
+-0.80508 0.430324 -0.226811 0.339446
+-0.831501 0.376748 -0.248526 0.323885
+-0.854361 0.321559 -0.269177 0.306937
+-0.873563 0.264992 -0.288675 0.288675
+-0.889024 0.207291 -0.306937 0.269177
+-0.900678 0.148703 -0.323885 0.248526
+-0.908475 0.089477 -0.339446 0.226811
+-0.912382 0.0298684 -0.353553 0.204124
+0.912382 0.0298683 0.323885 -0.248526
+0.908475 0.089477 0.339446 -0.226811
+0.900678 0.148703 0.353553 -0.204124
+0.889024 0.207291 0.366147 -0.180564
+0.873563 0.264992 0.377172 -0.15623
+0.854361 0.321559 0.386583 -0.131227
+0.831501 0.376748 0.394338 -0.105662
+0.80508 0.430324 0.400404 -0.0796453
+0.775212 0.482058 0.404756 -0.0532871
+0.742024 0.531727 0.407374 -0.0267007
+0.705659 0.579119 0.408248 -7.4012e-09
+0.666272 0.624032 0.407374 0.0267007
+0.624032 0.666272 0.404756 0.0532871
+0.579119 0.705659 0.400404 0.0796453
+0.531727 0.742024 0.394338 0.105662
+0.482058 0.775212 0.386583 0.131227
+0.430324 0.80508 0.377172 0.15623
+0.376748 0.831501 0.366147 0.180564
+0.321559 0.854361 0.353553 0.204124
+0.264992 0.873563 0.339446 0.226811
+0.207291 0.889024 0.323885 0.248526
+0.148702 0.900678 0.306937 0.269177
+0.0894769 0.908475 0.288675 0.288675
+0.0298682 0.912382 0.269177 0.306937
+-0.0298684 0.912382 0.248526 0.323885
+-0.0894771 0.908475 0.226811 0.339446
+-0.148703 0.900678 0.204124 0.353553
+-0.207291 0.889024 0.180564 0.366147
+-0.264993 0.873563 0.15623 0.377172
+-0.321559 0.854361 0.131227 0.386583
+-0.376748 0.831501 0.105662 0.394338
+-0.430324 0.80508 0.0796453 0.400404
+-0.482058 0.775212 0.0532871 0.404756
+-0.531727 0.742024 0.0267007 0.407374
+-0.579119 0.705659 1.38896e-08 0.408248
+-0.624032 0.666272 -0.0267007 0.407374
+-0.666272 0.624032 -0.0532871 0.404756
+-0.705659 0.579119 -0.0796453 0.400404
+-0.742024 0.531727 -0.105662 0.394338
+-0.775212 0.482058 -0.131227 0.386583
+-0.80508 0.430324 -0.15623 0.377172
+-0.831501 0.376748 -0.180564 0.366147
+-0.854361 0.321559 -0.204124 0.353553
+-0.873563 0.264992 -0.226811 0.339446
+-0.889024 0.207291 -0.248526 0.323885
+-0.900678 0.148703 -0.269177 0.306937
+-0.908475 0.089477 -0.288675 0.288675
+-0.912382 0.0298684 -0.306937 0.269177
+0.933521 0.0305603 0.308521 -0.180053
+0.929524 0.0915501 0.319636 -0.159489
+0.921546 0.152148 0.329383 -0.138242
+0.909622 0.212094 0.337719 -0.116404
+0.893803 0.271132 0.344609 -0.0940667
+0.874156 0.329009 0.350024 -0.0713268
+0.850766 0.385477 0.353939 -0.0482814
+0.823733 0.440295 0.356339 -0.0250293
+0.793173 0.493227 0.357213 -0.00166998
+0.759216 0.544047 0.356558 0.0216965
+0.722008 0.592537 0.354375 0.04497
+0.681709 0.63849 0.350675 0.068051
+0.63849 0.681709 0.345474 0.0908405
+0.592537 0.722008 0.338793 0.113241
+0.544047 0.759216 0.330661 0.135157
+0.493227 0.793173 0.321114 0.156494
+0.440295 0.823733 0.310191 0.17716
+0.385477 0.850766 0.29794 0.197069
+0.329009 0.874156 0.284413 0.216133
+0.271132 0.893803 0.269668 0.234272
+0.212094 0.909622 0.253769 0.251407
+0.152148 0.921546 0.236783 0.267466
+0.09155 0.929524 0.218783 0.28238
+0.0305602 0.933521 0.199846 0.296084
+-0.0305604 0.933521 0.180053 0.308521
+-0.0915502 0.929524 0.159489 0.319636
+-0.152148 0.921546 0.138242 0.329383
+-0.212094 0.909622 0.116404 0.337719
+-0.271132 0.893803 0.0940666 0.344609
+-0.329009 0.874156 0.0713267 0.350024
+-0.385477 0.850766 0.0482813 0.353939
+-0.440295 0.823733 0.0250293 0.356339
+-0.493227 0.793173 0.00166998 0.357213
+-0.544047 0.759216 -0.0216965 0.356558
+-0.592537 0.722008 -0.04497 0.354375
+-0.63849 0.681709 -0.068051 0.350675
+-0.681709 0.63849 -0.0908405 0.345474
+-0.722008 0.592537 -0.113241 0.338793
+-0.759216 0.544047 -0.135157 0.330661
+-0.793173 0.493227 -0.156494 0.321114
+-0.823733 0.440295 -0.17716 0.310191
+-0.850766 0.385477 -0.197069 0.29794
+-0.874156 0.329009 -0.216133 0.284413
+-0.893803 0.271132 -0.234272 0.269668
+-0.909622 0.212094 -0.251407 0.253769
+-0.921546 0.152148 -0.267466 0.236783
+-0.929524 0.0915501 -0.28238 0.218783
+-0.933521 0.0305604 -0.296084 0.199846
+0.933521 0.0305603 0.340851 -0.106886
+0.929524 0.0915501 0.347112 -0.0843647
+0.921546 0.152148 0.351887 -0.0614818
+0.909622 0.212094 0.355154 -0.0383357
+0.893803 0.271132 0.356901 -0.0150254
+0.874156 0.329009 0.35712 0.00834917
+0.850766 0.385477 0.355809 0.031688
+0.823733 0.440295 0.352975 0.0548912
+0.793173 0.493227 0.348629 0.0778593
+0.759216 0.544047 0.34279 0.100494
+0.722008 0.592537 0.335484 0.122698
+0.681709 0.63849 0.32674 0.144377
+0.63849 0.681709 0.316598 0.165438
+0.592537 0.722008 0.3051 0.18579
+0.544047 0.759216 0.292296 0.205347
+0.493227 0.793173 0.278239 0.224025
+0.440295 0.823733 0.262992 0.241743
+0.385477 0.850766 0.246618 0.258426
+0.329009 0.874156 0.229188 0.274002
+0.271132 0.893803 0.210777 0.288405
+0.212094 0.909622 0.191463 0.301573
+0.152148 0.921546 0.171329 0.313449
+0.09155 0.929524 0.150462 0.323984
+0.0305602 0.933521 0.12895 0.333131
+-0.0305604 0.933521 0.106886 0.340851
+-0.0915502 0.929524 0.0843647 0.347112
+-0.152148 0.921546 0.0614818 0.351887
+-0.212094 0.909622 0.0383357 0.355154
+-0.271132 0.893803 0.0150254 0.356901
+-0.329009 0.874156 -0.00834923 0.35712
+-0.385477 0.850766 -0.0316881 0.355809
+-0.440295 0.823733 -0.0548912 0.352975
+-0.493227 0.793173 -0.0778593 0.348629
+-0.544047 0.759216 -0.100494 0.34279
+-0.592537 0.722008 -0.122698 0.335484
+-0.63849 0.681709 -0.144377 0.32674
+-0.681709 0.63849 -0.165438 0.316598
+-0.722008 0.592537 -0.18579 0.3051
+-0.759216 0.544047 -0.205347 0.292296
+-0.793173 0.493227 -0.224025 0.278239
+-0.823733 0.440295 -0.241743 0.262992
+-0.850766 0.385477 -0.258425 0.246618
+-0.874156 0.329009 -0.274002 0.229188
+-0.893803 0.271132 -0.288405 0.210777
+-0.909622 0.212094 -0.301573 0.191463
+-0.921546 0.152148 -0.313449 0.171329
+-0.929524 0.0915501 -0.323984 0.150462
+-0.933521 0.0305604 -0.333131 0.12895
+0.951462 0.0311476 0.304712 -0.0300115
+0.947388 0.0933095 0.306022 -0.0100181
+0.939256 0.155072 0.306022 0.0100181
+0.927103 0.21617 0.304712 0.0300115
+0.91098 0.276343 0.302097 0.0498763
+0.890956 0.335332 0.298188 0.0695276
+0.867117 0.392885 0.293002 0.0888812
+0.839564 0.448756 0.286561 0.107854
+0.808416 0.502706 0.278894 0.126365
+0.773807 0.554502 0.270032 0.144335
+0.735884 0.603924 0.260014 0.161687
+0.69481 0.650761 0.248882 0.178347
+0.65076 0.69481 0.236685 0.194242
+0.603924 0.735884 0.223474 0.209307
+0.554502 0.773807 0.209307 0.223474
+0.502706 0.808416 0.194242 0.236685
+0.448756 0.839564 0.178347 0.248882
+0.392885 0.867117 0.161687 0.260014
+0.335332 0.890956 0.144335 0.270032
+0.276343 0.91098 0.126365 0.278894
+0.21617 0.927103 0.107854 0.286561
+0.155072 0.939256 0.0888811 0.293002
+0.0933094 0.947388 0.0695276 0.298188
+0.0311475 0.951462 0.0498763 0.302097
+-0.0311477 0.951462 0.0300115 0.304712
+-0.0933096 0.947388 0.0100181 0.306022
+-0.155072 0.939256 -0.0100182 0.306022
+-0.21617 0.927103 -0.0300115 0.304712
+-0.276343 0.91098 -0.0498764 0.302097
+-0.335332 0.890956 -0.0695277 0.298188
+-0.392886 0.867116 -0.0888812 0.293002
+-0.448756 0.839564 -0.107854 0.286562
+-0.502706 0.808416 -0.126365 0.278894
+-0.554502 0.773807 -0.144335 0.270032
+-0.603924 0.735884 -0.161687 0.260014
+-0.650761 0.69481 -0.178347 0.248882
+-0.69481 0.650761 -0.194242 0.236685
+-0.735884 0.603924 -0.209307 0.223474
+-0.773807 0.554502 -0.223474 0.209307
+-0.808416 0.502706 -0.236685 0.194242
+-0.839564 0.448756 -0.248882 0.178347
+-0.867117 0.392885 -0.260014 0.161687
+-0.890956 0.335332 -0.270032 0.144335
+-0.91098 0.276343 -0.278894 0.126365
+-0.927103 0.21617 -0.286562 0.107854
+-0.939256 0.155072 -0.293002 0.0888812
+-0.947388 0.0933095 -0.298188 0.0695276
+-0.951462 0.0311477 -0.302097 0.0498764
+0.951462 0.0311476 0.286561 -0.107854
+0.947388 0.0933095 0.293002 -0.0888812
+0.939256 0.155072 0.298188 -0.0695276
+0.927103 0.21617 0.302097 -0.0498763
+0.91098 0.276343 0.304712 -0.0300115
+0.890956 0.335332 0.306022 -0.0100181
+0.867117 0.392885 0.306022 0.0100181
+0.839564 0.448756 0.304712 0.0300115
+0.808416 0.502706 0.302097 0.0498764
+0.773807 0.554502 0.298188 0.0695276
+0.735884 0.603924 0.293002 0.0888812
+0.69481 0.650761 0.286561 0.107854
+0.65076 0.69481 0.278894 0.126365
+0.603924 0.735884 0.270032 0.144335
+0.554502 0.773807 0.260014 0.161687
+0.502706 0.808416 0.248882 0.178347
+0.448756 0.839564 0.236685 0.194242
+0.392885 0.867117 0.223474 0.209307
+0.335332 0.890956 0.209307 0.223474
+0.276343 0.91098 0.194242 0.236685
+0.21617 0.927103 0.178347 0.248882
+0.155072 0.939256 0.161687 0.260014
+0.0933094 0.947388 0.144335 0.270032
+0.0311475 0.951462 0.126365 0.278894
+-0.0311477 0.951462 0.107854 0.286562
+-0.0933096 0.947388 0.0888811 0.293002
+-0.155072 0.939256 0.0695276 0.298188
+-0.21617 0.927103 0.0498763 0.302097
+-0.276343 0.91098 0.0300114 0.304712
+-0.335332 0.890956 0.0100181 0.306022
+-0.392886 0.867116 -0.0100182 0.306022
+-0.448756 0.839564 -0.0300115 0.304712
+-0.502706 0.808416 -0.0498763 0.302097
+-0.554502 0.773807 -0.0695277 0.298188
+-0.603924 0.735884 -0.0888812 0.293002
+-0.650761 0.69481 -0.107854 0.286561
+-0.69481 0.650761 -0.126365 0.278894
+-0.735884 0.603924 -0.144335 0.270032
+-0.773807 0.554502 -0.161687 0.260014
+-0.808416 0.502706 -0.178347 0.248882
+-0.839564 0.448756 -0.194242 0.236685
+-0.867117 0.392885 -0.209307 0.223474
+-0.890956 0.335332 -0.223474 0.209307
+-0.91098 0.276343 -0.236685 0.194242
+-0.927103 0.21617 -0.248882 0.178347
+-0.939256 0.155072 -0.260014 0.161687
+-0.947388 0.0933095 -0.270032 0.144335
+-0.951462 0.0311477 -0.278894 0.126365
+0.966382 0.0316361 0.253185 -0.031648
+0.962244 0.0947728 0.254713 -0.0150212
+0.953986 0.157504 0.25515 0.00166997
+0.941642 0.21956 0.254494 0.018354
+0.925266 0.280676 0.252749 0.0349594
+0.904928 0.340591 0.249921 0.0514151
+0.880714 0.399046 0.246023 0.0676507
+0.85273 0.455794 0.241072 0.0835965
+0.821094 0.510589 0.235089 0.0991844
+0.785942 0.563198 0.228098 0.114348
+0.747424 0.613395 0.220131 0.129021
+0.705706 0.660965 0.211221 0.143142
+0.660965 0.705706 0.201407 0.15665
+0.613395 0.747424 0.190731 0.169487
+0.563198 0.785942 0.179237 0.181599
+0.510589 0.821094 0.166976 0.192933
+0.455793 0.85273 0.154 0.203441
+0.399046 0.880714 0.140365 0.213077
+0.340591 0.904928 0.126129 0.221801
+0.280676 0.925266 0.111352 0.229575
+0.21956 0.941642 0.0960987 0.236367
+0.157504 0.953986 0.0804338 0.242146
+0.0947727 0.962244 0.0644245 0.246888
+0.031636 0.966382 0.0481393 0.250573
+-0.0316362 0.966382 0.031648 0.253185
+-0.0947729 0.962244 0.0150212 0.254713
+-0.157504 0.953986 -0.00166999 0.25515
+-0.21956 0.941642 -0.018354 0.254494
+-0.280676 0.925266 -0.0349595 0.252749
+-0.340591 0.904927 -0.0514152 0.249921
+-0.399047 0.880714 -0.0676507 0.246023
+-0.455793 0.85273 -0.0835965 0.241072
+-0.510589 0.821094 -0.0991844 0.235089
+-0.563198 0.785941 -0.114348 0.228098
+-0.613395 0.747424 -0.129021 0.220131
+-0.660966 0.705706 -0.143142 0.211221
+-0.705706 0.660966 -0.15665 0.201407
+-0.747424 0.613395 -0.169487 0.190731
+-0.785942 0.563198 -0.181599 0.179237
+-0.821094 0.510589 -0.192933 0.166976
+-0.85273 0.455794 -0.20344 0.154
+-0.880714 0.399046 -0.213077 0.140365
+-0.904928 0.340591 -0.221801 0.126129
+-0.925266 0.280676 -0.229575 0.111352
+-0.941642 0.21956 -0.236367 0.0960987
+-0.953986 0.157504 -0.242146 0.0804339
+-0.962244 0.0947727 -0.246888 0.0644245
+-0.966382 0.0316362 -0.250573 0.0481394
+0.841175 0.0275372 0.222887 -0.491923
+0.837573 0.0824937 0.254583 -0.476292
+0.830384 0.137097 0.285189 -0.458622
+0.81964 0.191113 0.314574 -0.438987
+0.805385 0.244311 0.342612 -0.417473
+0.787682 0.296463 0.369182 -0.394172
+0.766606 0.347345 0.394172 -0.369182
+0.742247 0.396739 0.417473 -0.342612
+0.71471 0.444435 0.438987 -0.314574
+0.684112 0.490228 0.458622 -0.285189
+0.650585 0.533921 0.476292 -0.254583
+0.614272 0.575329 0.491923 -0.222887
+0.575329 0.614272 0.505447 -0.190237
+0.533922 0.650585 0.516807 -0.156772
+0.490228 0.684112 0.525954 -0.122635
+0.444435 0.71471 0.532848 -0.0879736
+0.396739 0.742247 0.537461 -0.0529353
+0.347345 0.766606 0.539773 -0.0176703
+0.296463 0.787682 0.539773 0.0176704
+0.244311 0.805385 0.537461 0.0529353
+0.191113 0.81964 0.532848 0.0879736
+0.137097 0.830384 0.525954 0.122635
+0.0824936 0.837573 0.516807 0.156772
+0.0275371 0.841175 0.505447 0.190237
+-0.0275373 0.841175 0.491923 0.222887
+-0.0824938 0.837573 0.476292 0.254583
+-0.137097 0.830384 0.458622 0.285189
+-0.191113 0.81964 0.438987 0.314574
+-0.244311 0.805385 0.417473 0.342612
+-0.296463 0.787682 0.394172 0.369182
+-0.347345 0.766606 0.369182 0.394172
+-0.396739 0.742247 0.342612 0.417473
+-0.444435 0.71471 0.314574 0.438987
+-0.490228 0.684112 0.285189 0.458622
+-0.533921 0.650585 0.254583 0.476292
+-0.575329 0.614272 0.222887 0.491923
+-0.614272 0.575329 0.190237 0.505447
+-0.650585 0.533921 0.156772 0.516807
+-0.684112 0.490228 0.122635 0.525954
+-0.71471 0.444435 0.0879736 0.532848
+-0.742247 0.39674 0.0529354 0.537461
+-0.766606 0.347345 0.0176703 0.539773
+-0.787682 0.296463 -0.0176704 0.539773
+-0.805385 0.244311 -0.0529353 0.537461
+-0.81964 0.191113 -0.0879736 0.532848
+-0.830384 0.137097 -0.122635 0.525954
+-0.837573 0.0824937 -0.156772 0.516807
+-0.841175 0.0275373 -0.190237 0.505447
+0.865562 0.0283356 0.25 -0.433013
+0.861855 0.0848853 0.277785 -0.415735
+0.854458 0.141072 0.304381 -0.396677
+0.843402 0.196654 0.329673 -0.37592
+0.828735 0.251394 0.353553 -0.353553
+0.810518 0.305057 0.37592 -0.329673
+0.788831 0.357415 0.396677 -0.304381
+0.763766 0.408242 0.415735 -0.277785
+0.735431 0.45732 0.433013 -0.25
+0.703946 0.50444 0.448436 -0.221144
+0.669447 0.549401 0.46194 -0.191342
+0.632081 0.592008 0.473465 -0.16072
+0.592008 0.632081 0.482963 -0.12941
+0.549401 0.669447 0.490393 -0.0975452
+0.50444 0.703946 0.495722 -0.0652631
+0.45732 0.735431 0.498929 -0.0327015
+0.408241 0.763766 0.5 4.11028e-08
+0.357415 0.788831 0.498929 0.0327016
+0.305057 0.810518 0.495722 0.0652631
+0.251394 0.828735 0.490393 0.0975452
+0.196654 0.843402 0.482963 0.12941
+0.141072 0.854458 0.473465 0.16072
+0.0848852 0.861855 0.46194 0.191342
+0.0283355 0.865562 0.448436 0.221144
+-0.0283356 0.865562 0.433013 0.25
+-0.0848854 0.861855 0.415735 0.277785
+-0.141072 0.854458 0.396677 0.304381
+-0.196654 0.843402 0.37592 0.329673
+-0.251394 0.828735 0.353553 0.353553
+-0.305058 0.810518 0.329673 0.37592
+-0.357415 0.788831 0.304381 0.396677
+-0.408241 0.763766 0.277785 0.415735
+-0.45732 0.735431 0.25 0.433013
+-0.504441 0.703946 0.221144 0.448436
+-0.549401 0.669447 0.191342 0.46194
+-0.592008 0.632081 0.16072 0.473465
+-0.632081 0.592008 0.12941 0.482963
+-0.669447 0.549401 0.0975451 0.490393
+-0.703946 0.504441 0.0652631 0.495722
+-0.735431 0.45732 0.0327016 0.498929
+-0.763766 0.408242 5.62508e-08 0.5
+-0.788831 0.357415 -0.0327016 0.498929
+-0.810518 0.305057 -0.0652631 0.495722
+-0.828735 0.251394 -0.0975451 0.490393
+-0.843402 0.196654 -0.12941 0.482963
+-0.854458 0.141072 -0.16072 0.473465
+-0.861855 0.0848853 -0.191342 0.46194
+-0.865562 0.0283356 -0.221144 0.448436
+0.865562 0.0283356 0.16072 -0.473465
+0.861855 0.0848853 0.191342 -0.46194
+0.854458 0.141072 0.221144 -0.448436
+0.843402 0.196654 0.25 -0.433013
+0.828735 0.251394 0.277785 -0.415735
+0.810518 0.305057 0.304381 -0.396677
+0.788831 0.357415 0.329673 -0.37592
+0.763766 0.408242 0.353553 -0.353553
+0.735431 0.45732 0.37592 -0.329673
+0.703946 0.50444 0.396677 -0.304381
+0.669447 0.549401 0.415735 -0.277785
+0.632081 0.592008 0.433013 -0.25
+0.592008 0.632081 0.448436 -0.221144
+0.549401 0.669447 0.46194 -0.191342
+0.50444 0.703946 0.473465 -0.16072
+0.45732 0.735431 0.482963 -0.129409
+0.408241 0.763766 0.490393 -0.0975451
+0.357415 0.788831 0.495722 -0.0652631
+0.305057 0.810518 0.498929 -0.0327015
+0.251394 0.828735 0.5 2.1483e-08
+0.196654 0.843402 0.498929 0.0327016
+0.141072 0.854458 0.495722 0.0652632
+0.0848852 0.861855 0.490393 0.0975452
+0.0283355 0.865562 0.482963 0.12941
+-0.0283356 0.865562 0.473465 0.16072
+-0.0848854 0.861855 0.46194 0.191342
+-0.141072 0.854458 0.448436 0.221144
+-0.196654 0.843402 0.433013 0.25
+-0.251394 0.828735 0.415735 0.277785
+-0.305058 0.810518 0.396677 0.304381
+-0.357415 0.788831 0.37592 0.329673
+-0.408241 0.763766 0.353553 0.353553
+-0.45732 0.735431 0.329673 0.37592
+-0.504441 0.703946 0.304381 0.396677
+-0.549401 0.669447 0.277785 0.415735
+-0.592008 0.632081 0.25 0.433013
+-0.632081 0.592008 0.221144 0.448436
+-0.669447 0.549401 0.191342 0.46194
+-0.703946 0.504441 0.16072 0.473465
+-0.735431 0.45732 0.12941 0.482963
+-0.763766 0.408242 0.0975452 0.490393
+-0.788831 0.357415 0.0652631 0.495722
+-0.810518 0.305057 0.0327015 0.498929
+-0.828735 0.251394 1.62659e-08 0.5
+-0.843402 0.196654 -0.0327016 0.498929
+-0.854458 0.141072 -0.0652631 0.495722
+-0.861855 0.0848853 -0.0975452 0.490393
+-0.865562 0.0283356 -0.129409 0.482963
+0.88928 0.029112 0.188374 -0.415751
+0.885472 0.0872114 0.215162 -0.40254
+0.877872 0.144937 0.241029 -0.387606
+0.866513 0.202043 0.265863 -0.371012
+0.851444 0.258283 0.28956 -0.352829
+0.832728 0.313417 0.312016 -0.333136
+0.810447 0.367209 0.333136 -0.312016
+0.784695 0.419428 0.352829 -0.28956
+0.755583 0.469852 0.371012 -0.265863
+0.723236 0.518263 0.387606 -0.241029
+0.687791 0.564456 0.40254 -0.215162
+0.649401 0.608231 0.415751 -0.188374
+0.608231 0.649401 0.427181 -0.160779
+0.564456 0.687791 0.436782 -0.132496
+0.518263 0.723236 0.444512 -0.103646
+0.469852 0.755583 0.450339 -0.0743512
+0.419428 0.784695 0.454238 -0.0447385
+0.367209 0.810447 0.456191 -0.0149341
+0.313417 0.832728 0.456191 0.0149342
+0.258283 0.851444 0.454238 0.0447385
+0.202043 0.866513 0.450339 0.0743513
+0.144937 0.877872 0.444512 0.103646
+0.0872113 0.885472 0.436781 0.132496
+0.0291119 0.88928 0.427181 0.160779
+-0.0291121 0.88928 0.415751 0.188374
+-0.0872115 0.885472 0.40254 0.215162
+-0.144937 0.877872 0.387606 0.241029
+-0.202043 0.866513 0.371012 0.265863
+-0.258283 0.851444 0.352829 0.28956
+-0.313417 0.832728 0.333136 0.312016
+-0.367209 0.810447 0.312016 0.333136
+-0.419428 0.784695 0.28956 0.352829
+-0.469852 0.755583 0.265863 0.371012
+-0.518263 0.723236 0.241029 0.387606
+-0.564456 0.687791 0.215162 0.40254
+-0.608231 0.649401 0.188374 0.415751
+-0.649401 0.608231 0.160779 0.427181
+-0.687791 0.564456 0.132496 0.436782
+-0.723236 0.518263 0.103646 0.444512
+-0.755583 0.469852 0.0743513 0.450339
+-0.784695 0.419428 0.0447386 0.454238
+-0.810447 0.367209 0.0149342 0.456191
+-0.832728 0.313417 -0.0149342 0.456191
+-0.851444 0.258283 -0.0447385 0.454238
+-0.866513 0.202043 -0.0743513 0.450339
+-0.877872 0.144937 -0.103646 0.444512
+-0.885472 0.0872113 -0.132496 0.436782
+-0.88928 0.0291121 -0.160779 0.427181
+0.88928 0.029112 0.265863 -0.371012
+0.885472 0.0872114 0.28956 -0.352829
+0.877872 0.144937 0.312016 -0.333136
+0.866513 0.202043 0.333136 -0.312016
+0.851444 0.258283 0.352829 -0.28956
+0.832728 0.313417 0.371012 -0.265863
+0.810447 0.367209 0.387606 -0.241029
+0.784695 0.419428 0.40254 -0.215162
+0.755583 0.469852 0.415751 -0.188374
+0.723236 0.518263 0.427181 -0.160779
+0.687791 0.564456 0.436782 -0.132496
+0.649401 0.608231 0.444512 -0.103646
+0.608231 0.649401 0.450339 -0.0743513
+0.564456 0.687791 0.454238 -0.0447385
+0.518263 0.723236 0.456191 -0.0149341
+0.469852 0.755583 0.456191 0.0149342
+0.419428 0.784695 0.454238 0.0447385
+0.367209 0.810447 0.450339 0.0743513
+0.313417 0.832728 0.444512 0.103646
+0.258283 0.851444 0.436782 0.132496
+0.202043 0.866513 0.427181 0.160779
+0.144937 0.877872 0.415751 0.188374
+0.0872113 0.885472 0.40254 0.215162
+0.0291119 0.88928 0.387606 0.241029
+-0.0291121 0.88928 0.371012 0.265864
+-0.0872115 0.885472 0.352829 0.28956
+-0.144937 0.877872 0.333136 0.312016
+-0.202043 0.866513 0.312016 0.333136
+-0.258283 0.851444 0.28956 0.352829
+-0.313417 0.832728 0.265863 0.371012
+-0.367209 0.810447 0.241029 0.387606
+-0.419428 0.784695 0.215162 0.40254
+-0.469852 0.755583 0.188374 0.415751
+-0.518263 0.723236 0.160779 0.427181
+-0.564456 0.687791 0.132496 0.436782
+-0.608231 0.649401 0.103646 0.444512
+-0.649401 0.608231 0.0743513 0.450339
+-0.687791 0.564456 0.0447385 0.454238
+-0.723236 0.518263 0.0149342 0.456191
+-0.755583 0.469852 -0.0149342 0.456191
+-0.784695 0.419428 -0.0447384 0.454238
+-0.810447 0.367209 -0.0743513 0.450339
+-0.832728 0.313417 -0.103646 0.444512
+-0.851444 0.258283 -0.132496 0.436782
+-0.866513 0.202043 -0.160779 0.427181
+-0.877872 0.144937 -0.188374 0.415751
+-0.885472 0.0872113 -0.215162 0.40254
+-0.88928 0.0291121 -0.241029 0.387606
+0.912382 0.0298683 0.269177 -0.306937
+0.908475 0.089477 0.288675 -0.288675
+0.900678 0.148703 0.306937 -0.269177
+0.889024 0.207291 0.323885 -0.248526
+0.873563 0.264992 0.339446 -0.226811
+0.854361 0.321559 0.353553 -0.204124
+0.831501 0.376748 0.366147 -0.180564
+0.80508 0.430324 0.377172 -0.15623
+0.775212 0.482058 0.386583 -0.131227
+0.742024 0.531727 0.394338 -0.105662
+0.705659 0.579119 0.400404 -0.0796453
+0.666272 0.624032 0.404756 -0.0532871
+0.624032 0.666272 0.407374 -0.0267007
+0.579119 0.705659 0.408248 9.12809e-10
+0.531727 0.742024 0.407374 0.0267007
+0.482058 0.775212 0.404756 0.0532871
+0.430324 0.80508 0.400404 0.0796453
+0.376748 0.831501 0.394338 0.105662
+0.321559 0.854361 0.386583 0.131227
+0.264992 0.873563 0.377172 0.15623
+0.207291 0.889024 0.366147 0.180564
+0.148702 0.900678 0.353553 0.204124
+0.0894769 0.908475 0.339446 0.226811
+0.0298682 0.912382 0.323885 0.248526
+-0.0298684 0.912382 0.306937 0.269177
+-0.0894771 0.908475 0.288675 0.288675
+-0.148703 0.900678 0.269177 0.306937
+-0.207291 0.889024 0.248526 0.323885
+-0.264993 0.873563 0.226811 0.339446
+-0.321559 0.854361 0.204124 0.353553
+-0.376748 0.831501 0.180564 0.366147
+-0.430324 0.80508 0.15623 0.377172
+-0.482058 0.775212 0.131227 0.386583
+-0.531727 0.742024 0.105662 0.394338
+-0.579119 0.705659 0.0796453 0.400404
+-0.624032 0.666272 0.0532871 0.404756
+-0.666272 0.624032 0.0267007 0.407374
+-0.705659 0.579119 -1.87579e-08 0.408248
+-0.742024 0.531727 -0.0267007 0.407374
+-0.775212 0.482058 -0.0532871 0.404756
+-0.80508 0.430324 -0.0796452 0.400404
+-0.831501 0.376748 -0.105662 0.394338
+-0.854361 0.321559 -0.131227 0.386583
+-0.873563 0.264992 -0.15623 0.377172
+-0.889024 0.207291 -0.180564 0.366147
+-0.900678 0.148703 -0.204124 0.353553
+-0.908475 0.089477 -0.226811 0.339446
+-0.912382 0.0298684 -0.248526 0.323885
+0.912382 0.0298683 0.204124 -0.353553
+0.908475 0.089477 0.226811 -0.339446
+0.900678 0.148703 0.248526 -0.323885
+0.889024 0.207291 0.269177 -0.306937
+0.873563 0.264992 0.288675 -0.288675
+0.854361 0.321559 0.306937 -0.269177
+0.831501 0.376748 0.323885 -0.248526
+0.80508 0.430324 0.339446 -0.226811
+0.775212 0.482058 0.353553 -0.204124
+0.742024 0.531727 0.366147 -0.180564
+0.705659 0.579119 0.377172 -0.15623
+0.666272 0.624032 0.386583 -0.131227
+0.624032 0.666272 0.394338 -0.105662
+0.579119 0.705659 0.400404 -0.0796453
+0.531727 0.742024 0.404756 -0.0532871
+0.482058 0.775212 0.407374 -0.0267007
+0.430324 0.80508 0.408248 3.35603e-08
+0.376748 0.831501 0.407374 0.0267007
+0.321559 0.854361 0.404756 0.0532871
+0.264992 0.873563 0.400404 0.0796453
+0.207291 0.889024 0.394338 0.105662
+0.148702 0.900678 0.386583 0.131227
+0.0894769 0.908475 0.377172 0.15623
+0.0298682 0.912382 0.366147 0.180564
+-0.0298684 0.912382 0.353553 0.204124
+-0.0894771 0.908475 0.339446 0.226811
+-0.148703 0.900678 0.323885 0.248526
+-0.207291 0.889024 0.306937 0.269177
+-0.264993 0.873563 0.288675 0.288675
+-0.321559 0.854361 0.269177 0.306937
+-0.376748 0.831501 0.248526 0.323885
+-0.430324 0.80508 0.226811 0.339446
+-0.482058 0.775212 0.204124 0.353553
+-0.531727 0.742024 0.180564 0.366147
+-0.579119 0.705659 0.15623 0.377172
+-0.624032 0.666272 0.131227 0.386583
+-0.666272 0.624032 0.105662 0.394338
+-0.705659 0.579119 0.0796453 0.400404
+-0.742024 0.531727 0.0532871 0.404756
+-0.775212 0.482058 0.0267007 0.407374
+-0.80508 0.430324 4.59286e-08 0.408248
+-0.831501 0.376748 -0.0267007 0.407374
+-0.854361 0.321559 -0.0532871 0.404756
+-0.873563 0.264992 -0.0796453 0.400404
+-0.889024 0.207291 -0.105662 0.394338
+-0.900678 0.148703 -0.131227 0.386583
+-0.908475 0.089477 -0.15623 0.377172
+-0.912382 0.0298684 -0.180564 0.366147
+0.933521 0.0305603 0.199846 -0.296084
+0.929524 0.0915501 0.218783 -0.28238
+0.921546 0.152148 0.236783 -0.267466
+0.909622 0.212094 0.253769 -0.251407
+0.893803 0.271132 0.269668 -0.234272
+0.874156 0.329009 0.284413 -0.216133
+0.850766 0.385477 0.29794 -0.197069
+0.823733 0.440295 0.310191 -0.17716
+0.793173 0.493227 0.321114 -0.156494
+0.759216 0.544047 0.330661 -0.135157
+0.722008 0.592537 0.338793 -0.113241
+0.681709 0.63849 0.345474 -0.0908405
+0.63849 0.681709 0.350675 -0.068051
+0.592537 0.722008 0.354375 -0.04497
+0.544047 0.759216 0.356558 -0.0216964
+0.493227 0.793173 0.357213 0.00167001
+0.440295 0.823733 0.356339 0.0250293
+0.385477 0.850766 0.353939 0.0482814
+0.329009 0.874156 0.350024 0.0713268
+0.271132 0.893803 0.344609 0.0940667
+0.212094 0.909622 0.337719 0.116404
+0.152148 0.921546 0.329383 0.138243
+0.09155 0.929524 0.319636 0.159489
+0.0305602 0.933521 0.308521 0.180053
+-0.0305604 0.933521 0.296084 0.199846
+-0.0915502 0.929524 0.28238 0.218783
+-0.152148 0.921546 0.267466 0.236783
+-0.212094 0.909622 0.251407 0.253769
+-0.271132 0.893803 0.234272 0.269668
+-0.329009 0.874156 0.216133 0.284413
+-0.385477 0.850766 0.197069 0.29794
+-0.440295 0.823733 0.17716 0.310191
+-0.493227 0.793173 0.156494 0.321114
+-0.544047 0.759216 0.135157 0.330661
+-0.592537 0.722008 0.113241 0.338793
+-0.63849 0.681709 0.0908405 0.345474
+-0.681709 0.63849 0.068051 0.350675
+-0.722008 0.592537 0.04497 0.354375
+-0.759216 0.544047 0.0216965 0.356558
+-0.793173 0.493227 -0.00166999 0.357213
+-0.823733 0.440295 -0.0250292 0.356339
+-0.850766 0.385477 -0.0482814 0.353939
+-0.874156 0.329009 -0.0713268 0.350024
+-0.893803 0.271132 -0.0940667 0.344609
+-0.909622 0.212094 -0.116404 0.337719
+-0.921546 0.152148 -0.138242 0.329383
+-0.929524 0.0915501 -0.159489 0.319636
+-0.933521 0.0305604 -0.180053 0.308521
+0.88928 0.029112 0.103646 -0.444512
+0.885472 0.0872114 0.132496 -0.436782
+0.877872 0.144937 0.160779 -0.427181
+0.866513 0.202043 0.188374 -0.415751
+0.851444 0.258283 0.215162 -0.40254
+0.832728 0.313417 0.241029 -0.387606
+0.810447 0.367209 0.265863 -0.371012
+0.784695 0.419428 0.28956 -0.352829
+0.755583 0.469852 0.312016 -0.333136
+0.723236 0.518263 0.333136 -0.312016
+0.687791 0.564456 0.352829 -0.28956
+0.649401 0.608231 0.371012 -0.265863
+0.608231 0.649401 0.387606 -0.241029
+0.564456 0.687791 0.40254 -0.215162
+0.518263 0.723236 0.415751 -0.188374
+0.469852 0.755583 0.427181 -0.160779
+0.419428 0.784695 0.436782 -0.132496
+0.367209 0.810447 0.444512 -0.103646
+0.313417 0.832728 0.450339 -0.0743512
+0.258283 0.851444 0.454238 -0.0447385
+0.202043 0.866513 0.456191 -0.0149341
+0.144937 0.877872 0.456191 0.0149342
+0.0872113 0.885472 0.454238 0.0447386
+0.0291119 0.88928 0.450339 0.0743513
+-0.0291121 0.88928 0.444512 0.103646
+-0.0872115 0.885472 0.436781 0.132496
+-0.144937 0.877872 0.427181 0.160779
+-0.202043 0.866513 0.415751 0.188374
+-0.258283 0.851444 0.40254 0.215162
+-0.313417 0.832728 0.387606 0.241029
+-0.367209 0.810447 0.371012 0.265864
+-0.419428 0.784695 0.352829 0.28956
+-0.469852 0.755583 0.333136 0.312016
+-0.518263 0.723236 0.312016 0.333136
+-0.564456 0.687791 0.28956 0.352829
+-0.608231 0.649401 0.265863 0.371012
+-0.649401 0.608231 0.241029 0.387606
+-0.687791 0.564456 0.215162 0.40254
+-0.723236 0.518263 0.188374 0.415751
+-0.755583 0.469852 0.160779 0.427181
+-0.784695 0.419428 0.132496 0.436781
+-0.810447 0.367209 0.103646 0.444512
+-0.832728 0.313417 0.0743512 0.450339
+-0.851444 0.258283 0.0447385 0.454238
+-0.866513 0.202043 0.0149341 0.456191
+-0.877872 0.144937 -0.0149341 0.456191
+-0.885472 0.0872113 -0.0447385 0.454238
+-0.88928 0.0291121 -0.0743512 0.450339
+0.912382 0.0298683 0.131227 -0.386583
+0.908475 0.089477 0.15623 -0.377172
+0.900678 0.148703 0.180564 -0.366147
+0.889024 0.207291 0.204124 -0.353553
+0.873563 0.264992 0.226811 -0.339446
+0.854361 0.321559 0.248526 -0.323885
+0.831501 0.376748 0.269177 -0.306937
+0.80508 0.430324 0.288675 -0.288675
+0.775212 0.482058 0.306937 -0.269177
+0.742024 0.531727 0.323885 -0.248526
+0.705659 0.579119 0.339446 -0.226811
+0.666272 0.624032 0.353553 -0.204124
+0.624032 0.666272 0.366147 -0.180564
+0.579119 0.705659 0.377172 -0.15623
+0.531727 0.742024 0.386583 -0.131227
+0.482058 0.775212 0.394338 -0.105662
+0.430324 0.80508 0.400404 -0.0796453
+0.376748 0.831501 0.404756 -0.0532871
+0.321559 0.854361 0.407374 -0.0267007
+0.264992 0.873563 0.408248 1.75408e-08
+0.207291 0.889024 0.407374 0.0267007
+0.148702 0.900678 0.404756 0.0532871
+0.0894769 0.908475 0.400404 0.0796453
+0.0298682 0.912382 0.394338 0.105662
+-0.0298684 0.912382 0.386583 0.131227
+-0.0894771 0.908475 0.377172 0.15623
+-0.148703 0.900678 0.366147 0.180564
+-0.207291 0.889024 0.353553 0.204124
+-0.264993 0.873563 0.339446 0.226811
+-0.321559 0.854361 0.323885 0.248526
+-0.376748 0.831501 0.306937 0.269177
+-0.430324 0.80508 0.288675 0.288675
+-0.482058 0.775212 0.269177 0.306937
+-0.531727 0.742024 0.248526 0.323885
+-0.579119 0.705659 0.226811 0.339446
+-0.624032 0.666272 0.204124 0.353553
+-0.666272 0.624032 0.180564 0.366147
+-0.705659 0.579119 0.15623 0.377172
+-0.742024 0.531727 0.131227 0.386583
+-0.775212 0.482058 0.105662 0.394338
+-0.80508 0.430324 0.0796453 0.400404
+-0.831501 0.376748 0.0532871 0.404756
+-0.854361 0.321559 0.0267007 0.407374
+-0.873563 0.264992 1.32811e-08 0.408248
+-0.889024 0.207291 -0.0267007 0.407374
+-0.900678 0.148703 -0.0532871 0.404756
+-0.908475 0.089477 -0.0796453 0.400404
+-0.912382 0.0298684 -0.105662 0.394338
+0.912382 0.0298683 0.0532871 -0.404756
+0.908475 0.089477 0.0796453 -0.400404
+0.900678 0.148703 0.105662 -0.394338
+0.889024 0.207291 0.131227 -0.386583
+0.873563 0.264992 0.15623 -0.377172
+0.854361 0.321559 0.180564 -0.366147
+0.831501 0.376748 0.204124 -0.353553
+0.80508 0.430324 0.226811 -0.339446
+0.775212 0.482058 0.248526 -0.323885
+0.742024 0.531727 0.269177 -0.306937
+0.705659 0.579119 0.288675 -0.288675
+0.666272 0.624032 0.306937 -0.269177
+0.624032 0.666272 0.323885 -0.248526
+0.579119 0.705659 0.339446 -0.226811
+0.531727 0.742024 0.353553 -0.204124
+0.482058 0.775212 0.366147 -0.180564
+0.430324 0.80508 0.377172 -0.15623
+0.376748 0.831501 0.386583 -0.131227
+0.321559 0.854361 0.394338 -0.105662
+0.264992 0.873563 0.400404 -0.0796453
+0.207291 0.889024 0.404756 -0.0532871
+0.148702 0.900678 0.407374 -0.0267007
+0.0894769 0.908475 0.408248 5.01883e-08
+0.0298682 0.912382 0.407374 0.0267008
+-0.0298684 0.912382 0.404756 0.0532871
+-0.0894771 0.908475 0.400404 0.0796453
+-0.148703 0.900678 0.394338 0.105662
+-0.207291 0.889024 0.386583 0.131227
+-0.264993 0.873563 0.377172 0.15623
+-0.321559 0.854361 0.366147 0.180564
+-0.376748 0.831501 0.353553 0.204124
+-0.430324 0.80508 0.339446 0.226811
+-0.482058 0.775212 0.323885 0.248526
+-0.531727 0.742024 0.306937 0.269177
+-0.579119 0.705659 0.288675 0.288675
+-0.624032 0.666272 0.269177 0.306937
+-0.666272 0.624032 0.248526 0.323885
+-0.705659 0.579119 0.226811 0.339446
+-0.742024 0.531727 0.204124 0.353553
+-0.775212 0.482058 0.180564 0.366147
+-0.80508 0.430324 0.15623 0.377172
+-0.831501 0.376748 0.131227 0.386583
+-0.854361 0.321559 0.105662 0.394338
+-0.873563 0.264992 0.0796453 0.400404
+-0.889024 0.207291 0.0532871 0.404756
+-0.900678 0.148703 0.0267007 0.407374
+-0.908475 0.089477 -1.93664e-08 0.408248
+-0.912382 0.0298684 -0.0267007 0.407374
+0.933521 0.0305603 0.0515886 -0.353472
+0.929524 0.0915501 0.0745963 -0.349342
+0.921546 0.152148 0.0972846 -0.343715
+0.909622 0.212094 0.119556 -0.336616
+0.893803 0.271132 0.141316 -0.328076
+0.874156 0.329009 0.162471 -0.318131
+0.850766 0.385477 0.18293 -0.306824
+0.823733 0.440295 0.202605 -0.294203
+0.793173 0.493227 0.221413 -0.280322
+0.759216 0.544047 0.239273 -0.265241
+0.722008 0.592537 0.256108 -0.249023
+0.681709 0.63849 0.271847 -0.23174
+0.63849 0.681709 0.286421 -0.213464
+0.592537 0.722008 0.299769 -0.194274
+0.544047 0.759216 0.311834 -0.174252
+0.493227 0.793173 0.322563 -0.153484
+0.440295 0.823733 0.33191 -0.132059
+0.385477 0.850766 0.339837 -0.110068
+0.329009 0.874156 0.346308 -0.0876064
+0.271132 0.893803 0.351296 -0.0647692
+0.212094 0.909622 0.35478 -0.0416547
+0.152148 0.921546 0.356745 -0.0183617
+0.09155 0.929524 0.357182 0.00500984
+0.0305602 0.933521 0.35609 0.0283599
+-0.0305604 0.933521 0.353472 0.0515886
+-0.0915502 0.929524 0.349342 0.0745963
+-0.152148 0.921546 0.343715 0.0972847
+-0.212094 0.909622 0.336616 0.119556
+-0.271132 0.893803 0.328076 0.141316
+-0.329009 0.874156 0.318131 0.162471
+-0.385477 0.850766 0.306824 0.18293
+-0.440295 0.823733 0.294203 0.202605
+-0.493227 0.793173 0.280322 0.221413
+-0.544047 0.759216 0.265241 0.239273
+-0.592537 0.722008 0.249023 0.256108
+-0.63849 0.681709 0.23174 0.271847
+-0.681709 0.63849 0.213464 0.286421
+-0.722008 0.592537 0.194274 0.299769
+-0.759216 0.544047 0.174252 0.311834
+-0.793173 0.493227 0.153484 0.322563
+-0.823733 0.440295 0.132059 0.33191
+-0.850766 0.385477 0.110069 0.339837
+-0.874156 0.329009 0.0876064 0.346308
+-0.893803 0.271132 0.0647693 0.351296
+-0.909622 0.212094 0.0416547 0.35478
+-0.921546 0.152148 0.0183618 0.356745
+-0.929524 0.0915501 -0.00500981 0.357182
+-0.933521 0.0305604 -0.0283599 0.35609
+0.933521 0.0305603 0.12895 -0.333131
+0.929524 0.0915501 0.150462 -0.323984
+0.921546 0.152148 0.171329 -0.313449
+0.909622 0.212094 0.191463 -0.301573
+0.893803 0.271132 0.210777 -0.288405
+0.874156 0.329009 0.229188 -0.274002
+0.850766 0.385477 0.246618 -0.258425
+0.823733 0.440295 0.262992 -0.241743
+0.793173 0.493227 0.278239 -0.224025
+0.759216 0.544047 0.292296 -0.205347
+0.722008 0.592537 0.3051 -0.18579
+0.681709 0.63849 0.316598 -0.165438
+0.63849 0.681709 0.32674 -0.144377
+0.592537 0.722008 0.335484 -0.122698
+0.544047 0.759216 0.34279 -0.100494
+0.493227 0.793173 0.348629 -0.0778593
+0.440295 0.823733 0.352975 -0.0548912
+0.385477 0.850766 0.355809 -0.031688
+0.329009 0.874156 0.35712 -0.00834915
+0.271132 0.893803 0.356901 0.0150255
+0.212094 0.909622 0.355154 0.0383357
+0.152148 0.921546 0.351887 0.0614819
+0.09155 0.929524 0.347112 0.0843647
+0.0305602 0.933521 0.340851 0.106886
+-0.0305604 0.933521 0.333131 0.12895
+-0.0915502 0.929524 0.323984 0.150462
+-0.152148 0.921546 0.313449 0.171329
+-0.212094 0.909622 0.301573 0.191463
+-0.271132 0.893803 0.288405 0.210777
+-0.329009 0.874156 0.274002 0.229188
+-0.385477 0.850766 0.258425 0.246618
+-0.440295 0.823733 0.241743 0.262992
+-0.493227 0.793173 0.224025 0.278239
+-0.544047 0.759216 0.205347 0.292296
+-0.592537 0.722008 0.18579 0.3051
+-0.63849 0.681709 0.165438 0.316598
+-0.681709 0.63849 0.144377 0.32674
+-0.722008 0.592537 0.122698 0.335484
+-0.759216 0.544047 0.100494 0.34279
+-0.793173 0.493227 0.0778593 0.348629
+-0.823733 0.440295 0.0548913 0.352975
+-0.850766 0.385477 0.031688 0.355809
+-0.874156 0.329009 0.00834914 0.35712
+-0.893803 0.271132 -0.0150254 0.356901
+-0.909622 0.212094 -0.0383358 0.355154
+-0.921546 0.152148 -0.0614818 0.351887
+-0.929524 0.0915501 -0.0843647 0.347112
+-0.933521 0.0305604 -0.106886 0.340851
+0.951462 0.0311476 0.126365 -0.278894
+0.947388 0.0933095 0.144335 -0.270032
+0.939256 0.155072 0.161687 -0.260014
+0.927103 0.21617 0.178347 -0.248882
+0.91098 0.276343 0.194242 -0.236685
+0.890956 0.335332 0.209307 -0.223474
+0.867117 0.392885 0.223474 -0.209307
+0.839564 0.448756 0.236685 -0.194242
+0.808416 0.502706 0.248882 -0.178347
+0.773807 0.554502 0.260014 -0.161687
+0.735884 0.603924 0.270032 -0.144335
+0.69481 0.650761 0.278894 -0.126365
+0.65076 0.69481 0.286561 -0.107854
+0.603924 0.735884 0.293002 -0.0888812
+0.554502 0.773807 0.298188 -0.0695276
+0.502706 0.808416 0.302097 -0.0498763
+0.448756 0.839564 0.304712 -0.0300115
+0.392885 0.867117 0.306022 -0.0100181
+0.335332 0.890956 0.306022 0.0100181
+0.276343 0.91098 0.304712 0.0300115
+0.21617 0.927103 0.302097 0.0498764
+0.155072 0.939256 0.298188 0.0695277
+0.0933094 0.947388 0.293002 0.0888812
+0.0311475 0.951462 0.286561 0.107854
+-0.0311477 0.951462 0.278894 0.126365
+-0.0933096 0.947388 0.270032 0.144335
+-0.155072 0.939256 0.260014 0.161687
+-0.21617 0.927103 0.248882 0.178347
+-0.276343 0.91098 0.236685 0.194243
+-0.335332 0.890956 0.223474 0.209307
+-0.392886 0.867116 0.209307 0.223474
+-0.448756 0.839564 0.194243 0.236685
+-0.502706 0.808416 0.178347 0.248882
+-0.554502 0.773807 0.161687 0.260014
+-0.603924 0.735884 0.144335 0.270032
+-0.650761 0.69481 0.126365 0.278894
+-0.69481 0.650761 0.107854 0.286561
+-0.735884 0.603924 0.0888812 0.293002
+-0.773807 0.554502 0.0695276 0.298188
+-0.808416 0.502706 0.0498763 0.302097
+-0.839564 0.448756 0.0300115 0.304712
+-0.867117 0.392885 0.0100181 0.306022
+-0.890956 0.335332 -0.0100182 0.306022
+-0.91098 0.276343 -0.0300115 0.304712
+-0.927103 0.21617 -0.0498764 0.302097
+-0.939256 0.155072 -0.0695276 0.298188
+-0.947388 0.0933095 -0.0888812 0.293002
+-0.951462 0.0311477 -0.107854 0.286562
+0.951462 0.0311476 0.0498763 -0.302097
+0.947388 0.0933095 0.0695276 -0.298188
+0.939256 0.155072 0.0888812 -0.293002
+0.927103 0.21617 0.107854 -0.286561
+0.91098 0.276343 0.126365 -0.278894
+0.890956 0.335332 0.144335 -0.270032
+0.867117 0.392885 0.161687 -0.260014
+0.839564 0.448756 0.178347 -0.248882
+0.808416 0.502706 0.194242 -0.236685
+0.773807 0.554502 0.209307 -0.223474
+0.735884 0.603924 0.223474 -0.209307
+0.69481 0.650761 0.236685 -0.194242
+0.65076 0.69481 0.248882 -0.178347
+0.603924 0.735884 0.260014 -0.161687
+0.554502 0.773807 0.270032 -0.144335
+0.502706 0.808416 0.278894 -0.126365
+0.448756 0.839564 0.286562 -0.107854
+0.392885 0.867117 0.293002 -0.0888811
+0.335332 0.890956 0.298188 -0.0695276
+0.276343 0.91098 0.302097 -0.0498763
+0.21617 0.927103 0.304712 -0.0300115
+0.155072 0.939256 0.306022 -0.0100181
+0.0933094 0.947388 0.306022 0.0100182
+0.0311475 0.951462 0.304712 0.0300115
+-0.0311477 0.951462 0.302097 0.0498764
+-0.0933096 0.947388 0.298188 0.0695276
+-0.155072 0.939256 0.293002 0.0888812
+-0.21617 0.927103 0.286561 0.107854
+-0.276343 0.91098 0.278894 0.126365
+-0.335332 0.890956 0.270032 0.144335
+-0.392886 0.867116 0.260014 0.161687
+-0.448756 0.839564 0.248882 0.178347
+-0.502706 0.808416 0.236685 0.194242
+-0.554502 0.773807 0.223474 0.209307
+-0.603924 0.735884 0.209307 0.223474
+-0.650761 0.69481 0.194242 0.236685
+-0.69481 0.650761 0.178347 0.248882
+-0.735884 0.603924 0.161687 0.260014
+-0.773807 0.554502 0.144335 0.270032
+-0.808416 0.502706 0.126365 0.278894
+-0.839564 0.448756 0.107854 0.286561
+-0.867117 0.392885 0.0888812 0.293002
+-0.890956 0.335332 0.0695276 0.298188
+-0.91098 0.276343 0.0498764 0.302097
+-0.927103 0.21617 0.0300115 0.304712
+-0.939256 0.155072 0.0100182 0.306022
+-0.947388 0.0933095 -0.0100181 0.306022
+-0.951462 0.0311477 -0.0300115 0.304712
+0.966382 0.0316361 0.0481394 -0.250573
+0.962244 0.0947728 0.0644245 -0.246888
+0.953986 0.157504 0.0804338 -0.242146
+0.941642 0.21956 0.0960987 -0.236367
+0.925266 0.280676 0.111352 -0.229575
+0.904928 0.340591 0.126129 -0.221801
+0.880714 0.399046 0.140365 -0.213077
+0.85273 0.455794 0.154 -0.20344
+0.821094 0.510589 0.166976 -0.192933
+0.785942 0.563198 0.179237 -0.181599
+0.747424 0.613395 0.190731 -0.169487
+0.705706 0.660965 0.201407 -0.15665
+0.660965 0.705706 0.211221 -0.143142
+0.613395 0.747424 0.220131 -0.129021
+0.563198 0.785942 0.228098 -0.114348
+0.510589 0.821094 0.235089 -0.0991844
+0.455793 0.85273 0.241072 -0.0835965
+0.399046 0.880714 0.246023 -0.0676507
+0.340591 0.904928 0.249921 -0.0514151
+0.280676 0.925266 0.252749 -0.0349594
+0.21956 0.941642 0.254494 -0.018354
+0.157504 0.953986 0.25515 -0.00166994
+0.0947727 0.962244 0.254713 0.0150212
+0.031636 0.966382 0.253185 0.0316481
+-0.0316362 0.966382 0.250573 0.0481394
+-0.0947729 0.962244 0.246888 0.0644246
+-0.157504 0.953986 0.242146 0.0804339
+-0.21956 0.941642 0.236367 0.0960987
+-0.280676 0.925266 0.229575 0.111352
+-0.340591 0.904927 0.221801 0.126129
+-0.399047 0.880714 0.213077 0.140365
+-0.455793 0.85273 0.203441 0.154
+-0.510589 0.821094 0.192933 0.166976
+-0.563198 0.785941 0.181599 0.179237
+-0.613395 0.747424 0.169487 0.190731
+-0.660966 0.705706 0.15665 0.201407
+-0.705706 0.660966 0.143142 0.211221
+-0.747424 0.613395 0.129021 0.220131
+-0.785942 0.563198 0.114348 0.228098
+-0.821094 0.510589 0.0991844 0.235089
+-0.85273 0.455794 0.0835966 0.241072
+-0.880714 0.399046 0.0676507 0.246023
+-0.904928 0.340591 0.0514151 0.249921
+-0.925266 0.280676 0.0349594 0.252749
+-0.941642 0.21956 0.018354 0.254494
+-0.953986 0.157504 0.00166999 0.25515
+-0.962244 0.0947727 -0.0150212 0.254713
+-0.966382 0.0316362 -0.031648 0.253185
+0.933521 0.0305603 0.26072 -0.244191
+0.929524 0.0915501 0.276133 -0.226616
+0.921546 0.152148 0.290363 -0.208071
+0.909622 0.212094 0.30335 -0.188635
+0.893803 0.271132 0.315037 -0.168391
+0.874156 0.329009 0.325376 -0.147426
+0.850766 0.385477 0.334322 -0.12583
+0.823733 0.440295 0.341836 -0.103695
+0.793173 0.493227 0.347886 -0.0811156
+0.759216 0.544047 0.352446 -0.0581891
+0.722008 0.592537 0.355497 -0.0350134
+0.681709 0.63849 0.357026 -0.0116878
+0.63849 0.681709 0.357026 0.0116878
+0.592537 0.722008 0.355497 0.0350134
+0.544047 0.759216 0.352446 0.0581891
+0.493227 0.793173 0.347886 0.0811156
+0.440295 0.823733 0.341836 0.103695
+0.385477 0.850766 0.334322 0.12583
+0.329009 0.874156 0.325376 0.147426
+0.271132 0.893803 0.315037 0.168391
+0.212094 0.909622 0.30335 0.188635
+0.152148 0.921546 0.290363 0.208071
+0.09155 0.929524 0.276133 0.226616
+0.0305602 0.933521 0.26072 0.244191
+-0.0305604 0.933521 0.244191 0.26072
+-0.0915502 0.929524 0.226616 0.276133
+-0.152148 0.921546 0.208071 0.290363
+-0.212094 0.909622 0.188635 0.30335
+-0.271132 0.893803 0.168391 0.315038
+-0.329009 0.874156 0.147426 0.325376
+-0.385477 0.850766 0.12583 0.334322
+-0.440295 0.823733 0.103695 0.341836
+-0.493227 0.793173 0.0811156 0.347886
+-0.544047 0.759216 0.058189 0.352446
+-0.592537 0.722008 0.0350134 0.355497
+-0.63849 0.681709 0.0116878 0.357026
+-0.681709 0.63849 -0.0116878 0.357026
+-0.722008 0.592537 -0.0350134 0.355497
+-0.759216 0.544047 -0.058189 0.352446
+-0.793173 0.493227 -0.0811156 0.347886
+-0.823733 0.440295 -0.103695 0.341836
+-0.850766 0.385477 -0.12583 0.334322
+-0.874156 0.329009 -0.147426 0.325376
+-0.893803 0.271132 -0.168391 0.315037
+-0.909622 0.212094 -0.188635 0.30335
+-0.921546 0.152148 -0.208071 0.290363
+-0.929524 0.0915501 -0.226616 0.276133
+-0.933521 0.0305604 -0.244191 0.26072
+0.951462 0.0311476 0.248882 -0.178347
+0.947388 0.0933095 0.260014 -0.161687
+0.939256 0.155072 0.270032 -0.144335
+0.927103 0.21617 0.278894 -0.126365
+0.91098 0.276343 0.286561 -0.107854
+0.890956 0.335332 0.293002 -0.0888812
+0.867117 0.392885 0.298188 -0.0695276
+0.839564 0.448756 0.302097 -0.0498763
+0.808416 0.502706 0.304712 -0.0300115
+0.773807 0.554502 0.306022 -0.0100181
+0.735884 0.603924 0.306022 0.0100181
+0.69481 0.650761 0.304712 0.0300115
+0.65076 0.69481 0.302097 0.0498764
+0.603924 0.735884 0.298188 0.0695276
+0.554502 0.773807 0.293002 0.0888812
+0.502706 0.808416 0.286561 0.107854
+0.448756 0.839564 0.278894 0.126365
+0.392885 0.867117 0.270032 0.144335
+0.335332 0.890956 0.260014 0.161687
+0.276343 0.91098 0.248882 0.178347
+0.21617 0.927103 0.236685 0.194242
+0.155072 0.939256 0.223474 0.209307
+0.0933094 0.947388 0.209307 0.223474
+0.0311475 0.951462 0.194242 0.236685
+-0.0311477 0.951462 0.178347 0.248882
+-0.0933096 0.947388 0.161687 0.260014
+-0.155072 0.939256 0.144335 0.270032
+-0.21617 0.927103 0.126365 0.278894
+-0.276343 0.91098 0.107854 0.286562
+-0.335332 0.890956 0.0888811 0.293002
+-0.392886 0.867116 0.0695276 0.298188
+-0.448756 0.839564 0.0498764 0.302097
+-0.502706 0.808416 0.0300115 0.304712
+-0.554502 0.773807 0.0100181 0.306022
+-0.603924 0.735884 -0.0100181 0.306022
+-0.650761 0.69481 -0.0300115 0.304712
+-0.69481 0.650761 -0.0498763 0.302097
+-0.735884 0.603924 -0.0695276 0.298188
+-0.773807 0.554502 -0.0888811 0.293002
+-0.808416 0.502706 -0.107854 0.286561
+-0.839564 0.448756 -0.126365 0.278894
+-0.867117 0.392885 -0.144335 0.270032
+-0.890956 0.335332 -0.161687 0.260014
+-0.91098 0.276343 -0.178347 0.248882
+-0.927103 0.21617 -0.194242 0.236685
+-0.939256 0.155072 -0.209307 0.223474
+-0.947388 0.0933095 -0.223474 0.209307
+-0.951462 0.0311477 -0.236685 0.194243
+0.951462 0.0311476 0.194242 -0.236685
+0.947388 0.0933095 0.209307 -0.223474
+0.939256 0.155072 0.223474 -0.209307
+0.927103 0.21617 0.236685 -0.194242
+0.91098 0.276343 0.248882 -0.178347
+0.890956 0.335332 0.260014 -0.161687
+0.867117 0.392885 0.270032 -0.144335
+0.839564 0.448756 0.278894 -0.126365
+0.808416 0.502706 0.286561 -0.107854
+0.773807 0.554502 0.293002 -0.0888812
+0.735884 0.603924 0.298188 -0.0695276
+0.69481 0.650761 0.302097 -0.0498763
+0.65076 0.69481 0.304712 -0.0300115
+0.603924 0.735884 0.306022 -0.0100181
+0.554502 0.773807 0.306022 0.0100181
+0.502706 0.808416 0.304712 0.0300115
+0.448756 0.839564 0.302097 0.0498764
+0.392885 0.867117 0.298188 0.0695276
+0.335332 0.890956 0.293002 0.0888812
+0.276343 0.91098 0.286561 0.107854
+0.21617 0.927103 0.278894 0.126365
+0.155072 0.939256 0.270032 0.144335
+0.0933094 0.947388 0.260014 0.161687
+0.0311475 0.951462 0.248882 0.178347
+-0.0311477 0.951462 0.236685 0.194243
+-0.0933096 0.947388 0.223474 0.209307
+-0.155072 0.939256 0.209307 0.223474
+-0.21617 0.927103 0.194242 0.236685
+-0.276343 0.91098 0.178347 0.248882
+-0.335332 0.890956 0.161687 0.260014
+-0.392886 0.867116 0.144335 0.270032
+-0.448756 0.839564 0.126365 0.278894
+-0.502706 0.808416 0.107854 0.286561
+-0.554502 0.773807 0.0888811 0.293002
+-0.603924 0.735884 0.0695276 0.298188
+-0.650761 0.69481 0.0498763 0.302097
+-0.69481 0.650761 0.0300115 0.304712
+-0.735884 0.603924 0.0100181 0.306022
+-0.773807 0.554502 -0.0100181 0.306022
+-0.808416 0.502706 -0.0300115 0.304712
+-0.839564 0.448756 -0.0498763 0.302097
+-0.867117 0.392885 -0.0695276 0.298188
+-0.890956 0.335332 -0.0888812 0.293002
+-0.91098 0.276343 -0.107854 0.286561
+-0.927103 0.21617 -0.126365 0.278894
+-0.939256 0.155072 -0.144335 0.270032
+-0.947388 0.0933095 -0.161687 0.260014
+-0.951462 0.0311477 -0.178347 0.248882
+0.966382 0.0316361 0.186229 -0.174422
+0.962244 0.0947728 0.197238 -0.161869
+0.953986 0.157504 0.207402 -0.148622
+0.941642 0.21956 0.216678 -0.134739
+0.925266 0.280676 0.225027 -0.120279
+0.904928 0.340591 0.232412 -0.105304
+0.880714 0.399046 0.238801 -0.0898784
+0.85273 0.455794 0.244168 -0.0740676
+0.821094 0.510589 0.24849 -0.0579397
+0.785942 0.563198 0.251747 -0.0415636
+0.747424 0.613395 0.253927 -0.0250096
+0.705706 0.660965 0.255019 -0.00834844
+0.660965 0.705706 0.255019 0.00834845
+0.613395 0.747424 0.253927 0.0250096
+0.563198 0.785942 0.251747 0.0415636
+0.510589 0.821094 0.24849 0.0579397
+0.455793 0.85273 0.244168 0.0740677
+0.399046 0.880714 0.238801 0.0898784
+0.340591 0.904928 0.232412 0.105304
+0.280676 0.925266 0.225027 0.120279
+0.21956 0.941642 0.216678 0.134739
+0.157504 0.953986 0.207402 0.148622
+0.0947727 0.962244 0.197238 0.161869
+0.031636 0.966382 0.186229 0.174422
+-0.0316362 0.966382 0.174422 0.186229
+-0.0947729 0.962244 0.161869 0.197238
+-0.157504 0.953986 0.148622 0.207402
+-0.21956 0.941642 0.134739 0.216678
+-0.280676 0.925266 0.120279 0.225027
+-0.340591 0.904927 0.105304 0.232412
+-0.399047 0.880714 0.0898784 0.238801
+-0.455793 0.85273 0.0740677 0.244168
+-0.510589 0.821094 0.0579397 0.24849
+-0.563198 0.785941 0.0415636 0.251747
+-0.613395 0.747424 0.0250096 0.253927
+-0.660966 0.705706 0.00834843 0.255019
+-0.705706 0.660966 -0.00834843 0.255019
+-0.747424 0.613395 -0.0250096 0.253927
+-0.785942 0.563198 -0.0415636 0.251747
+-0.821094 0.510589 -0.0579397 0.24849
+-0.85273 0.455794 -0.0740676 0.244168
+-0.880714 0.399046 -0.0898784 0.238801
+-0.904928 0.340591 -0.105304 0.232412
+-0.925266 0.280676 -0.120279 0.225027
+-0.941642 0.21956 -0.134739 0.216678
+-0.953986 0.157504 -0.148622 0.207402
+-0.962244 0.0947727 -0.161869 0.197238
+-0.966382 0.0316362 -0.174422 0.186229
+0.966382 0.0316361 0.231013 -0.108337
+0.962244 0.0947728 0.237604 -0.0929965
+0.953986 0.157504 0.243178 -0.0772574
+0.941642 0.21956 0.24771 -0.0611873
+0.925266 0.280676 0.251182 -0.0448553
+0.904928 0.340591 0.253577 -0.0283312
+0.880714 0.399046 0.254887 -0.0116858
+0.85273 0.455794 0.255106 0.00500964
+0.821094 0.510589 0.254232 0.0216836
+0.785942 0.563198 0.25227 0.0382648
+0.747424 0.613395 0.249227 0.0546821
+0.705706 0.660965 0.245117 0.0708652
+0.660965 0.705706 0.239957 0.0867449
+0.613395 0.747424 0.23377 0.102253
+0.563198 0.785942 0.226582 0.117324
+0.510589 0.821094 0.218423 0.131891
+0.455793 0.85273 0.20933 0.145895
+0.399046 0.880714 0.19934 0.159273
+0.340591 0.904928 0.188496 0.17197
+0.280676 0.925266 0.176845 0.18393
+0.21956 0.941642 0.164437 0.195102
+0.157504 0.953986 0.151324 0.205439
+0.0947727 0.962244 0.137564 0.214896
+0.031636 0.966382 0.123215 0.223433
+-0.0316362 0.966382 0.108337 0.231013
+-0.0947729 0.962244 0.0929965 0.237604
+-0.157504 0.953986 0.0772573 0.243178
+-0.21956 0.941642 0.0611873 0.24771
+-0.280676 0.925266 0.0448553 0.251182
+-0.340591 0.904927 0.0283312 0.253577
+-0.399047 0.880714 0.0116858 0.254887
+-0.455793 0.85273 -0.00500961 0.255106
+-0.510589 0.821094 -0.0216836 0.254232
+-0.563198 0.785941 -0.0382648 0.25227
+-0.613395 0.747424 -0.0546821 0.249227
+-0.660966 0.705706 -0.0708652 0.245117
+-0.705706 0.660966 -0.0867449 0.239957
+-0.747424 0.613395 -0.102253 0.23377
+-0.785942 0.563198 -0.117323 0.226582
+-0.821094 0.510589 -0.131891 0.218423
+-0.85273 0.455794 -0.145895 0.20933
+-0.880714 0.399046 -0.159273 0.19934
+-0.904928 0.340591 -0.17197 0.188496
+-0.925266 0.280676 -0.18393 0.176845
+-0.941642 0.21956 -0.195102 0.164437
+-0.953986 0.157504 -0.205439 0.151324
+-0.962244 0.0947727 -0.214896 0.137564
+-0.966382 0.0316362 -0.223433 0.123215
+0.978421 0.0320302 0.201398 -0.0332509
+0.974231 0.0959534 0.203141 -0.0200077
+0.96587 0.159466 0.204015 -0.00667876
+0.953372 0.222295 0.204015 0.00667876
+0.936792 0.284173 0.203141 0.0200077
+0.9162 0.344833 0.201398 0.0332509
+0.891686 0.404017 0.198792 0.0463517
+0.863352 0.461472 0.195335 0.0592541
+0.831322 0.516949 0.191041 0.0719027
+0.795732 0.570214 0.185929 0.0842435
+0.756735 0.621036 0.180021 0.0962235
+0.714497 0.669199 0.173343 0.107791
+0.669199 0.714497 0.165922 0.118898
+0.621036 0.756735 0.15779 0.129495
+0.570214 0.795732 0.148983 0.139538
+0.516949 0.831322 0.139538 0.148983
+0.461471 0.863352 0.129495 0.15779
+0.404017 0.891686 0.118898 0.165922
+0.344833 0.9162 0.107791 0.173343
+0.284173 0.936792 0.0962234 0.180021
+0.222295 0.953372 0.0842435 0.185929
+0.159466 0.96587 0.0719027 0.191041
+0.0959533 0.974231 0.0592541 0.195335
+0.0320301 0.978421 0.0463517 0.198792
+-0.0320303 0.978421 0.0332509 0.201398
+-0.0959535 0.974231 0.0200076 0.203141
+-0.159466 0.96587 0.00667874 0.204015
+-0.222295 0.953372 -0.00667877 0.204015
+-0.284173 0.936792 -0.0200077 0.203141
+-0.344834 0.9162 -0.0332509 0.201398
+-0.404018 0.891686 -0.0463518 0.198792
+-0.461471 0.863352 -0.0592541 0.195335
+-0.516949 0.831322 -0.0719027 0.191041
+-0.570214 0.795732 -0.0842435 0.185929
+-0.621036 0.756735 -0.0962234 0.180021
+-0.669199 0.714497 -0.107791 0.173343
+-0.714497 0.669199 -0.118898 0.165922
+-0.756735 0.621036 -0.129495 0.15779
+-0.795732 0.570214 -0.139538 0.148983
+-0.831322 0.516949 -0.148983 0.139538
+-0.863352 0.461472 -0.15779 0.129495
+-0.891686 0.404017 -0.165922 0.118898
+-0.9162 0.344833 -0.173343 0.107791
+-0.936792 0.284173 -0.180021 0.0962235
+-0.953372 0.222295 -0.185929 0.0842435
+-0.96587 0.159466 -0.191041 0.0719028
+-0.974231 0.0959533 -0.195335 0.0592541
+-0.978421 0.0320303 -0.198792 0.0463518
+0.978421 0.0320302 0.173343 -0.107791
+0.974231 0.0959534 0.180021 -0.0962235
+0.96587 0.159466 0.185929 -0.0842435
+0.953372 0.222295 0.191041 -0.0719027
+0.936792 0.284173 0.195335 -0.0592541
+0.9162 0.344833 0.198792 -0.0463518
+0.891686 0.404017 0.201398 -0.0332509
+0.863352 0.461472 0.203141 -0.0200077
+0.831322 0.516949 0.204015 -0.00667875
+0.795732 0.570214 0.204015 0.00667875
+0.756735 0.621036 0.203141 0.0200077
+0.714497 0.669199 0.201398 0.0332509
+0.669199 0.714497 0.198792 0.0463518
+0.621036 0.756735 0.195335 0.0592541
+0.570214 0.795732 0.191041 0.0719027
+0.516949 0.831322 0.185929 0.0842435
+0.461471 0.863352 0.180021 0.0962235
+0.404017 0.891686 0.173343 0.107791
+0.344833 0.9162 0.165922 0.118898
+0.284173 0.936792 0.15779 0.129495
+0.222295 0.953372 0.148983 0.139538
+0.159466 0.96587 0.139538 0.148983
+0.0959533 0.974231 0.129495 0.15779
+0.0320301 0.978421 0.118898 0.165922
+-0.0320303 0.978421 0.107791 0.173343
+-0.0959535 0.974231 0.0962234 0.180021
+-0.159466 0.96587 0.0842435 0.185929
+-0.222295 0.953372 0.0719027 0.191041
+-0.284173 0.936792 0.0592541 0.195335
+-0.344834 0.9162 0.0463517 0.198792
+-0.404018 0.891686 0.0332509 0.201398
+-0.461471 0.863352 0.0200077 0.203141
+-0.516949 0.831322 0.00667876 0.204015
+-0.570214 0.795732 -0.00667877 0.204015
+-0.621036 0.756735 -0.0200077 0.203141
+-0.669199 0.714497 -0.0332509 0.201398
+-0.714497 0.669199 -0.0463517 0.198792
+-0.756735 0.621036 -0.0592541 0.195335
+-0.795732 0.570214 -0.0719027 0.191041
+-0.831322 0.516949 -0.0842435 0.185929
+-0.863352 0.461472 -0.0962234 0.180021
+-0.891686 0.404017 -0.107791 0.173343
+-0.9162 0.344833 -0.118898 0.165922
+-0.936792 0.284173 -0.129495 0.15779
+-0.953372 0.222295 -0.139538 0.148983
+-0.96587 0.159466 -0.148983 0.139538
+-0.974231 0.0959533 -0.15779 0.129495
+-0.978421 0.0320303 -0.165922 0.118898
+0.987683 0.0323334 0.149094 -0.0347638
+0.983453 0.0968617 0.151048 -0.0249382
+0.975013 0.160975 0.152356 -0.0150057
+0.962397 0.224399 0.153011 -0.00500906
+0.94566 0.286863 0.153011 0.00500907
+0.924873 0.348098 0.152356 0.0150057
+0.900126 0.407842 0.151048 0.0249382
+0.871525 0.46584 0.149094 0.0347638
+0.839192 0.521843 0.146501 0.0444406
+0.803265 0.575611 0.143281 0.0539271
+0.763898 0.626915 0.139447 0.0631826
+0.72126 0.675534 0.135016 0.0721676
+0.675534 0.72126 0.130007 0.0808436
+0.626915 0.763898 0.124441 0.0891733
+0.575611 0.803265 0.118343 0.0971212
+0.521843 0.839192 0.111737 0.104653
+0.46584 0.871525 0.104653 0.111737
+0.407842 0.900126 0.0971212 0.118343
+0.348098 0.924873 0.0891733 0.124441
+0.286863 0.94566 0.0808435 0.130007
+0.224399 0.962397 0.0721676 0.135016
+0.160975 0.975013 0.0631826 0.139447
+0.0968616 0.983453 0.053927 0.143281
+0.0323333 0.987683 0.0444406 0.146501
+-0.0323335 0.987683 0.0347638 0.149094
+-0.0968618 0.983453 0.0249382 0.151048
+-0.160975 0.975013 0.0150057 0.152356
+-0.224399 0.962397 0.00500906 0.153011
+-0.286863 0.94566 -0.00500909 0.153011
+-0.348098 0.924873 -0.0150058 0.152356
+-0.407842 0.900126 -0.0249382 0.151048
+-0.46584 0.871525 -0.0347638 0.149094
+-0.521843 0.839192 -0.0444406 0.146501
+-0.575611 0.803265 -0.0539271 0.143281
+-0.626915 0.763898 -0.0631826 0.139447
+-0.675534 0.72126 -0.0721676 0.135016
+-0.72126 0.675534 -0.0808435 0.130007
+-0.763898 0.626915 -0.0891733 0.124441
+-0.803265 0.575611 -0.0971212 0.118343
+-0.839192 0.521843 -0.104653 0.111737
+-0.871525 0.46584 -0.111737 0.104653
+-0.900126 0.407842 -0.118343 0.0971212
+-0.924873 0.348098 -0.124441 0.0891733
+-0.94566 0.286863 -0.130007 0.0808436
+-0.962397 0.224399 -0.135016 0.0721676
+-0.975013 0.160975 -0.139447 0.0631826
+-0.983453 0.0968616 -0.143281 0.053927
+-0.987683 0.0323335 -0.146501 0.0444406
+0.966382 0.0316361 0.123215 -0.223433
+0.962244 0.0947728 0.137564 -0.214896
+0.953986 0.157504 0.151324 -0.205439
+0.941642 0.21956 0.164437 -0.195102
+0.925266 0.280676 0.176845 -0.18393
+0.904928 0.340591 0.188496 -0.17197
+0.880714 0.399046 0.19934 -0.159273
+0.85273 0.455794 0.20933 -0.145895
+0.821094 0.510589 0.218423 -0.131891
+0.785942 0.563198 0.226582 -0.117324
+0.747424 0.613395 0.23377 -0.102253
+0.705706 0.660965 0.239957 -0.0867449
+0.660965 0.705706 0.245117 -0.0708652
+0.613395 0.747424 0.249227 -0.0546821
+0.563198 0.785942 0.25227 -0.0382648
+0.510589 0.821094 0.254232 -0.0216836
+0.455793 0.85273 0.255106 -0.00500962
+0.399046 0.880714 0.254887 0.0116858
+0.340591 0.904928 0.253577 0.0283312
+0.280676 0.925266 0.251182 0.0448553
+0.21956 0.941642 0.24771 0.0611874
+0.157504 0.953986 0.243178 0.0772574
+0.0947727 0.962244 0.237604 0.0929966
+0.031636 0.966382 0.231013 0.108338
+-0.0316362 0.966382 0.223433 0.123215
+-0.0947729 0.962244 0.214896 0.137564
+-0.157504 0.953986 0.205439 0.151324
+-0.21956 0.941642 0.195102 0.164437
+-0.280676 0.925266 0.18393 0.176845
+-0.340591 0.904927 0.171969 0.188496
+-0.399047 0.880714 0.159273 0.19934
+-0.455793 0.85273 0.145895 0.20933
+-0.510589 0.821094 0.131891 0.218423
+-0.563198 0.785941 0.117323 0.226582
+-0.613395 0.747424 0.102253 0.23377
+-0.660966 0.705706 0.0867449 0.239957
+-0.705706 0.660966 0.0708652 0.245117
+-0.747424 0.613395 0.0546821 0.249227
+-0.785942 0.563198 0.0382648 0.25227
+-0.821094 0.510589 0.0216836 0.254232
+-0.85273 0.455794 0.00500967 0.255106
+-0.880714 0.399046 -0.0116858 0.254887
+-0.904928 0.340591 -0.0283313 0.253577
+-0.925266 0.280676 -0.0448553 0.251182
+-0.941642 0.21956 -0.0611874 0.24771
+-0.953986 0.157504 -0.0772573 0.243178
+-0.962244 0.0947727 -0.0929965 0.237604
+-0.966382 0.0316362 -0.108337 0.231013
+0.978421 0.0320302 0.118898 -0.165922
+0.974231 0.0959534 0.129495 -0.15779
+0.96587 0.159466 0.139538 -0.148983
+0.953372 0.222295 0.148983 -0.139538
+0.936792 0.284173 0.15779 -0.129495
+0.9162 0.344833 0.165922 -0.118898
+0.891686 0.404017 0.173343 -0.107791
+0.863352 0.461472 0.180021 -0.0962235
+0.831322 0.516949 0.185929 -0.0842435
+0.795732 0.570214 0.191041 -0.0719027
+0.756735 0.621036 0.195335 -0.0592541
+0.714497 0.669199 0.198792 -0.0463517
+0.669199 0.714497 0.201398 -0.0332509
+0.621036 0.756735 0.203141 -0.0200077
+0.570214 0.795732 0.204015 -0.00667874
+0.516949 0.831322 0.204015 0.00667877
+0.461471 0.863352 0.203141 0.0200077
+0.404017 0.891686 0.201398 0.0332509
+0.344833 0.9162 0.198792 0.0463518
+0.284173 0.936792 0.195335 0.0592541
+0.222295 0.953372 0.191041 0.0719027
+0.159466 0.96587 0.185929 0.0842435
+0.0959533 0.974231 0.180021 0.0962235
+0.0320301 0.978421 0.173343 0.107791
+-0.0320303 0.978421 0.165922 0.118898
+-0.0959535 0.974231 0.15779 0.129495
+-0.159466 0.96587 0.148983 0.139538
+-0.222295 0.953372 0.139538 0.148983
+-0.284173 0.936792 0.129495 0.15779
+-0.344834 0.9162 0.118898 0.165922
+-0.404018 0.891686 0.107791 0.173343
+-0.461471 0.863352 0.0962235 0.180021
+-0.516949 0.831322 0.0842435 0.185929
+-0.570214 0.795732 0.0719027 0.191041
+-0.621036 0.756735 0.0592541 0.195335
+-0.669199 0.714497 0.0463517 0.198792
+-0.714497 0.669199 0.0332509 0.201398
+-0.756735 0.621036 0.0200077 0.203141
+-0.795732 0.570214 0.00667877 0.204015
+-0.831322 0.516949 -0.00667876 0.204015
+-0.863352 0.461472 -0.0200076 0.203141
+-0.891686 0.404017 -0.0332509 0.201398
+-0.9162 0.344833 -0.0463518 0.198792
+-0.936792 0.284173 -0.0592541 0.195335
+-0.953372 0.222295 -0.0719028 0.191041
+-0.96587 0.159466 -0.0842435 0.185929
+-0.974231 0.0959533 -0.0962235 0.180021
+-0.978421 0.0320303 -0.107791 0.173343
+0.978421 0.0320302 0.0463517 -0.198792
+0.974231 0.0959534 0.0592541 -0.195335
+0.96587 0.159466 0.0719027 -0.191041
+0.953372 0.222295 0.0842435 -0.185929
+0.936792 0.284173 0.0962235 -0.180021
+0.9162 0.344833 0.107791 -0.173343
+0.891686 0.404017 0.118898 -0.165922
+0.863352 0.461472 0.129495 -0.15779
+0.831322 0.516949 0.139538 -0.148983
+0.795732 0.570214 0.148983 -0.139538
+0.756735 0.621036 0.15779 -0.129495
+0.714497 0.669199 0.165922 -0.118898
+0.669199 0.714497 0.173343 -0.107791
+0.621036 0.756735 0.180021 -0.0962235
+0.570214 0.795732 0.185929 -0.0842435
+0.516949 0.831322 0.191041 -0.0719027
+0.461471 0.863352 0.195335 -0.0592541
+0.404017 0.891686 0.198792 -0.0463517
+0.344833 0.9162 0.201398 -0.0332509
+0.284173 0.936792 0.203141 -0.0200077
+0.222295 0.953372 0.204015 -0.00667875
+0.159466 0.96587 0.204015 0.00667878
+0.0959533 0.974231 0.203141 0.0200077
+0.0320301 0.978421 0.201398 0.0332509
+-0.0320303 0.978421 0.198792 0.0463518
+-0.0959535 0.974231 0.195335 0.0592541
+-0.159466 0.96587 0.191041 0.0719028
+-0.222295 0.953372 0.185929 0.0842435
+-0.284173 0.936792 0.180021 0.0962235
+-0.344834 0.9162 0.173343 0.107791
+-0.404018 0.891686 0.165922 0.118898
+-0.461471 0.863352 0.15779 0.129495
+-0.516949 0.831322 0.148983 0.139538
+-0.570214 0.795732 0.139538 0.148983
+-0.621036 0.756735 0.129495 0.15779
+-0.669199 0.714497 0.118898 0.165922
+-0.714497 0.669199 0.107791 0.173343
+-0.756735 0.621036 0.0962234 0.180021
+-0.795732 0.570214 0.0842435 0.185929
+-0.831322 0.516949 0.0719027 0.191041
+-0.863352 0.461472 0.0592541 0.195335
+-0.891686 0.404017 0.0463517 0.198792
+-0.9162 0.344833 0.0332509 0.201398
+-0.936792 0.284173 0.0200077 0.203141
+-0.953372 0.222295 0.00667874 0.204015
+-0.96587 0.159466 -0.00667874 0.204015
+-0.974231 0.0959533 -0.0200077 0.203141
+-0.978421 0.0320303 -0.0332509 0.201398
+0.987683 0.0323334 0.0444406 -0.146501
+0.983453 0.0968617 0.0539271 -0.143281
+0.975013 0.160975 0.0631826 -0.139447
+0.962397 0.224399 0.0721676 -0.135016
+0.94566 0.286863 0.0808435 -0.130007
+0.924873 0.348098 0.0891733 -0.124441
+0.900126 0.407842 0.0971212 -0.118343
+0.871525 0.46584 0.104653 -0.111737
+0.839192 0.521843 0.111737 -0.104653
+0.803265 0.575611 0.118343 -0.0971212
+0.763898 0.626915 0.124441 -0.0891733
+0.72126 0.675534 0.130007 -0.0808435
+0.675534 0.72126 0.135016 -0.0721676
+0.626915 0.763898 0.139447 -0.0631826
+0.575611 0.803265 0.143281 -0.053927
+0.521843 0.839192 0.146501 -0.0444406
+0.46584 0.871525 0.149094 -0.0347638
+0.407842 0.900126 0.151048 -0.0249382
+0.348098 0.924873 0.152356 -0.0150057
+0.286863 0.94566 0.153011 -0.00500906
+0.224399 0.962397 0.153011 0.00500907
+0.160975 0.975013 0.152356 0.0150058
+0.0968616 0.983453 0.151048 0.0249382
+0.0323333 0.987683 0.149094 0.0347638
+-0.0323335 0.987683 0.146501 0.0444406
+-0.0968618 0.983453 0.143281 0.0539271
+-0.160975 0.975013 0.139447 0.0631826
+-0.224399 0.962397 0.135016 0.0721676
+-0.286863 0.94566 0.130007 0.0808436
+-0.348098 0.924873 0.124441 0.0891733
+-0.407842 0.900126 0.118343 0.0971213
+-0.46584 0.871525 0.111737 0.104653
+-0.521843 0.839192 0.104653 0.111737
+-0.575611 0.803265 0.0971212 0.118343
+-0.626915 0.763898 0.0891733 0.124441
+-0.675534 0.72126 0.0808435 0.130007
+-0.72126 0.675534 0.0721676 0.135016
+-0.763898 0.626915 0.0631826 0.139447
+-0.803265 0.575611 0.0539271 0.143281
+-0.839192 0.521843 0.0444406 0.146501
+-0.871525 0.46584 0.0347638 0.149094
+-0.900126 0.407842 0.0249382 0.151048
+-0.924873 0.348098 0.0150057 0.152356
+-0.94566 0.286863 0.00500907 0.153011
+-0.962397 0.224399 -0.00500908 0.153011
+-0.975013 0.160975 -0.0150057 0.152356
+-0.983453 0.0968616 -0.0249382 0.151048
+-0.987683 0.0323335 -0.0347638 0.149094
+0.987683 0.0323334 0.111737 -0.104653
+0.983453 0.0968617 0.118343 -0.0971212
+0.975013 0.160975 0.124441 -0.0891733
+0.962397 0.224399 0.130007 -0.0808435
+0.94566 0.286863 0.135016 -0.0721676
+0.924873 0.348098 0.139447 -0.0631826
+0.900126 0.407842 0.143281 -0.0539271
+0.871525 0.46584 0.146501 -0.0444406
+0.839192 0.521843 0.149094 -0.0347638
+0.803265 0.575611 0.151048 -0.0249382
+0.763898 0.626915 0.152356 -0.0150058
+0.72126 0.675534 0.153011 -0.00500906
+0.675534 0.72126 0.153011 0.00500907
+0.626915 0.763898 0.152356 0.0150057
+0.575611 0.803265 0.151048 0.0249382
+0.521843 0.839192 0.149094 0.0347638
+0.46584 0.871525 0.146501 0.0444406
+0.407842 0.900126 0.143281 0.0539271
+0.348098 0.924873 0.139447 0.0631826
+0.286863 0.94566 0.135016 0.0721676
+0.224399 0.962397 0.130007 0.0808436
+0.160975 0.975013 0.124441 0.0891733
+0.0968616 0.983453 0.118343 0.0971213
+0.0323333 0.987683 0.111737 0.104653
+-0.0323335 0.987683 0.104653 0.111737
+-0.0968618 0.983453 0.0971212 0.118343
+-0.160975 0.975013 0.0891733 0.124441
+-0.224399 0.962397 0.0808435 0.130007
+-0.286863 0.94566 0.0721676 0.135016
+-0.348098 0.924873 0.0631826 0.139447
+-0.407842 0.900126 0.053927 0.143281
+-0.46584 0.871525 0.0444406 0.146501
+-0.521843 0.839192 0.0347638 0.149094
+-0.575611 0.803265 0.0249382 0.151048
+-0.626915 0.763898 0.0150058 0.152356
+-0.675534 0.72126 0.00500906 0.153011
+-0.72126 0.675534 -0.00500906 0.153011
+-0.763898 0.626915 -0.0150058 0.152356
+-0.803265 0.575611 -0.0249382 0.151048
+-0.839192 0.521843 -0.0347638 0.149094
+-0.871525 0.46584 -0.0444406 0.146501
+-0.900126 0.407842 -0.0539271 0.143281
+-0.924873 0.348098 -0.0631826 0.139447
+-0.94566 0.286863 -0.0721676 0.135016
+-0.962397 0.224399 -0.0808436 0.130007
+-0.975013 0.160975 -0.0891733 0.124441
+-0.983453 0.0968616 -0.0971212 0.118343
+-0.987683 0.0323335 -0.104653 0.111737
+0.994245 0.0325482 0.0955205 -0.0359514
+0.989988 0.0975053 0.0976673 -0.0296271
+0.981491 0.162045 0.0993959 -0.0231759
+0.968791 0.22589 0.100699 -0.0166254
+0.951943 0.288769 0.101571 -0.0100038
+0.931019 0.350411 0.102007 -0.00333938
+0.906107 0.410552 0.102007 0.00333938
+0.877316 0.468935 0.101571 0.0100038
+0.844768 0.52531 0.100699 0.0166255
+0.808602 0.579436 0.0993959 0.0231759
+0.768974 0.63108 0.0976673 0.0296271
+0.726053 0.680023 0.0955205 0.0359514
+0.680023 0.726053 0.0929646 0.0421217
+0.631081 0.768974 0.0900107 0.0481117
+0.579436 0.808602 0.0866713 0.0538957
+0.52531 0.844768 0.0829608 0.0594489
+0.468935 0.877316 0.078895 0.0647475
+0.410552 0.906107 0.0744914 0.0697689
+0.350411 0.931019 0.0697688 0.0744914
+0.288769 0.951943 0.0647475 0.0788951
+0.22589 0.968791 0.0594489 0.0829608
+0.162045 0.981491 0.0538957 0.0866713
+0.0975052 0.989988 0.0481117 0.0900107
+0.0325481 0.994245 0.0421217 0.0929647
+-0.0325483 0.994245 0.0359514 0.0955205
+-0.0975054 0.989988 0.029627 0.0976673
+-0.162045 0.981491 0.0231759 0.0993959
+-0.225891 0.968791 0.0166254 0.100699
+-0.288769 0.951943 0.0100038 0.101571
+-0.350411 0.931019 0.00333936 0.102007
+-0.410552 0.906107 -0.00333939 0.102007
+-0.468935 0.877316 -0.0100038 0.101571
+-0.52531 0.844768 -0.0166254 0.100699
+-0.579436 0.808602 -0.0231759 0.0993959
+-0.63108 0.768974 -0.0296271 0.0976673
+-0.680023 0.726053 -0.0359514 0.0955205
+-0.726053 0.680023 -0.0421217 0.0929647
+-0.768974 0.63108 -0.0481117 0.0900107
+-0.808602 0.579436 -0.0538957 0.0866713
+-0.844768 0.52531 -0.0594489 0.0829608
+-0.877316 0.468935 -0.0647475 0.0788951
+-0.906107 0.410552 -0.0697688 0.0744914
+-0.931019 0.350411 -0.0744914 0.0697688
+-0.951943 0.288769 -0.078895 0.0647475
+-0.968791 0.22589 -0.0829608 0.0594489
+-0.981491 0.162045 -0.0866713 0.0538957
+-0.989988 0.0975053 -0.0900107 0.0481117
+-0.994245 0.0325483 -0.0929646 0.0421217
+0.994245 0.0325482 0.0421217 -0.0929646
+0.989988 0.0975053 0.0481117 -0.0900107
+0.981491 0.162045 0.0538957 -0.0866713
+0.968791 0.22589 0.0594489 -0.0829608
+0.951943 0.288769 0.0647475 -0.078895
+0.931019 0.350411 0.0697688 -0.0744914
+0.906107 0.410552 0.0744914 -0.0697688
+0.877316 0.468935 0.0788951 -0.0647475
+0.844768 0.52531 0.0829608 -0.0594489
+0.808602 0.579436 0.0866713 -0.0538957
+0.768974 0.63108 0.0900107 -0.0481117
+0.726053 0.680023 0.0929647 -0.0421217
+0.680023 0.726053 0.0955205 -0.0359514
+0.631081 0.768974 0.0976673 -0.0296271
+0.579436 0.808602 0.0993959 -0.0231759
+0.52531 0.844768 0.100699 -0.0166254
+0.468935 0.877316 0.101571 -0.0100038
+0.410552 0.906107 0.102007 -0.00333937
+0.350411 0.931019 0.102007 0.00333938
+0.288769 0.951943 0.101571 0.0100038
+0.22589 0.968791 0.100699 0.0166255
+0.162045 0.981491 0.0993959 0.0231759
+0.0975052 0.989988 0.0976673 0.0296271
+0.0325481 0.994245 0.0955205 0.0359514
+-0.0325483 0.994245 0.0929646 0.0421217
+-0.0975054 0.989988 0.0900107 0.0481117
+-0.162045 0.981491 0.0866713 0.0538957
+-0.225891 0.968791 0.0829608 0.0594489
+-0.288769 0.951943 0.078895 0.0647475
+-0.350411 0.931019 0.0744914 0.0697689
+-0.410552 0.906107 0.0697688 0.0744914
+-0.468935 0.877316 0.0647475 0.078895
+-0.52531 0.844768 0.0594489 0.0829608
+-0.579436 0.808602 0.0538957 0.0866713
+-0.63108 0.768974 0.0481117 0.0900107
+-0.680023 0.726053 0.0421217 0.0929647
+-0.726053 0.680023 0.0359514 0.0955205
+-0.768974 0.63108 0.0296271 0.0976673
+-0.808602 0.579436 0.0231759 0.0993959
+-0.844768 0.52531 0.0166254 0.100699
+-0.877316 0.468935 0.0100038 0.101571
+-0.906107 0.410552 0.00333938 0.102007
+-0.931019 0.350411 -0.00333939 0.102007
+-0.951943 0.288769 -0.0100038 0.101571
+-0.968791 0.22589 -0.0166255 0.100699
+-0.981491 0.162045 -0.0231759 0.0993959
+-0.989988 0.0975053 -0.0296271 0.0976673
+-0.994245 0.0325483 -0.0359514 0.0955205
+0.998162 0.0326765 0.0372457 -0.0348844
+0.993888 0.0978894 0.0394475 -0.0323737
+0.985358 0.162683 0.0414804 -0.0297244
+0.972608 0.22678 0.0433357 -0.0269478
+0.955694 0.289906 0.0450054 -0.0240559
+0.934687 0.351791 0.0464823 -0.0210609
+0.909677 0.412169 0.0477602 -0.0179757
+0.880772 0.470783 0.0488337 -0.0148135
+0.848096 0.52738 0.049698 -0.0115879
+0.811788 0.581719 0.0503494 -0.00831273
+0.772003 0.633567 0.0507853 -0.00500192
+0.728913 0.682702 0.0510037 -0.00166969
+0.682702 0.728913 0.0510037 0.00166969
+0.633567 0.772003 0.0507853 0.00500192
+0.581719 0.811788 0.0503494 0.00831273
+0.52738 0.848096 0.049698 0.0115879
+0.470782 0.880772 0.0488337 0.0148135
+0.412169 0.909677 0.0477602 0.0179757
+0.351791 0.934687 0.0464823 0.0210609
+0.289906 0.955694 0.0450054 0.0240559
+0.22678 0.972608 0.0433357 0.0269479
+0.162683 0.985358 0.0414804 0.0297244
+0.0978893 0.993888 0.0394475 0.0323738
+0.0326763 0.998162 0.0372457 0.0348844
+-0.0326765 0.998162 0.0348844 0.0372457
+-0.0978895 0.993888 0.0323737 0.0394475
+-0.162683 0.985358 0.0297244 0.0414804
+-0.22678 0.972608 0.0269478 0.0433357
+-0.289907 0.955693 0.0240559 0.0450054
+-0.351791 0.934686 0.0210609 0.0464823
+-0.412169 0.909677 0.0179757 0.0477603
+-0.470782 0.880772 0.0148135 0.0488337
+-0.52738 0.848096 0.0115879 0.049698
+-0.581719 0.811788 0.00831272 0.0503494
+-0.633567 0.772003 0.00500192 0.0507853
+-0.682702 0.728913 0.00166969 0.0510037
+-0.728913 0.682702 -0.00166969 0.0510037
+-0.772003 0.633567 -0.00500192 0.0507853
+-0.811788 0.581719 -0.00831272 0.0503494
+-0.848096 0.52738 -0.0115879 0.049698
+-0.880772 0.470783 -0.0148135 0.0488337
+-0.909677 0.412169 -0.0179757 0.0477602
+-0.934687 0.351791 -0.0210609 0.0464823
+-0.955693 0.289906 -0.0240559 0.0450054
+-0.972608 0.22678 -0.0269479 0.0433357
+-0.985358 0.162683 -0.0297244 0.0414804
+-0.993888 0.0978894 -0.0323737 0.0394475
+-0.998162 0.0326765 -0.0348844 0.0372457
+0.456191 0.0149342 0.88928 0.029112
+0.454238 0.0447385 0.885472 0.0872114
+0.450339 0.0743513 0.877872 0.144937
+0.444512 0.103646 0.866513 0.202043
+0.436782 0.132496 0.851444 0.258283
+0.427181 0.160779 0.832728 0.313417
+0.415751 0.188374 0.810447 0.367209
+0.40254 0.215162 0.784695 0.419428
+0.387606 0.241029 0.755583 0.469852
+0.371012 0.265863 0.723236 0.518263
+0.352829 0.28956 0.687791 0.564456
+0.333136 0.312016 0.649401 0.608231
+0.312016 0.333136 0.608231 0.649401
+0.28956 0.352829 0.564456 0.687791
+0.265863 0.371012 0.518263 0.723236
+0.241029 0.387606 0.469852 0.755583
+0.215162 0.40254 0.419428 0.784695
+0.188374 0.415751 0.367209 0.810447
+0.160779 0.427181 0.313417 0.832728
+0.132496 0.436782 0.258283 0.851444
+0.103646 0.444512 0.202043 0.866513
+0.0743512 0.450339 0.144937 0.877872
+0.0447384 0.454238 0.0872113 0.885472
+0.0149341 0.456191 0.0291119 0.88928
+-0.0149342 0.456191 -0.0291121 0.88928
+-0.0447385 0.454238 -0.0872115 0.885472
+-0.0743513 0.450339 -0.144937 0.877872
+-0.103646 0.444512 -0.202043 0.866513
+-0.132496 0.436781 -0.258283 0.851444
+-0.160779 0.427181 -0.313417 0.832728
+-0.188374 0.415751 -0.367209 0.810447
+-0.215162 0.40254 -0.419428 0.784695
+-0.241029 0.387606 -0.469852 0.755583
+-0.265864 0.371012 -0.518263 0.723236
+-0.28956 0.352829 -0.564456 0.687791
+-0.312016 0.333136 -0.608231 0.649401
+-0.333136 0.312016 -0.649401 0.608231
+-0.352829 0.28956 -0.687791 0.564456
+-0.371012 0.265864 -0.723236 0.518263
+-0.387606 0.241029 -0.755583 0.469852
+-0.40254 0.215162 -0.784695 0.419428
+-0.415751 0.188374 -0.810447 0.367209
+-0.427181 0.160779 -0.832728 0.313417
+-0.436782 0.132496 -0.851444 0.258283
+-0.444512 0.103646 -0.866513 0.202043
+-0.450339 0.0743513 -0.877872 0.144937
+-0.454238 0.0447385 -0.885472 0.0872113
+-0.456191 0.0149342 -0.88928 0.0291121
+0.499732 0.0163595 0.858616 0.113039
+0.497592 0.0490086 0.849385 0.168953
+0.493322 0.0814477 0.836516 0.224144
+0.486938 0.113538 0.820066 0.278375
+0.47847 0.145142 0.800103 0.331414
+0.467953 0.176125 0.776715 0.383033
+0.455432 0.206354 0.75 0.433013
+0.440961 0.235698 0.720074 0.481138
+0.424601 0.264034 0.687064 0.527203
+0.406423 0.291239 0.651112 0.57101
+0.386505 0.317197 0.612372 0.612372
+0.364932 0.341796 0.57101 0.651112
+0.341796 0.364932 0.527203 0.687064
+0.317197 0.386505 0.481138 0.720074
+0.291239 0.406423 0.433013 0.75
+0.264034 0.424601 0.383033 0.776715
+0.235698 0.440961 0.331414 0.800103
+0.206353 0.455432 0.278375 0.820066
+0.176125 0.467953 0.224144 0.836516
+0.145142 0.47847 0.168953 0.849385
+0.113538 0.486938 0.113039 0.858616
+0.0814477 0.493322 0.0566407 0.864171
+0.0490085 0.497592 -1.06466e-07 0.866025
+0.0163595 0.499732 -0.0566409 0.864171
+-0.0163596 0.499732 -0.113039 0.858616
+-0.0490086 0.497592 -0.168953 0.849385
+-0.0814478 0.493322 -0.224144 0.836516
+-0.113538 0.486938 -0.278375 0.820066
+-0.145142 0.47847 -0.331414 0.800103
+-0.176125 0.467953 -0.383033 0.776715
+-0.206354 0.455432 -0.433013 0.75
+-0.235698 0.440961 -0.481138 0.720074
+-0.264034 0.424601 -0.527203 0.687064
+-0.291239 0.406423 -0.57101 0.651112
+-0.317197 0.386505 -0.612372 0.612372
+-0.341796 0.364932 -0.651112 0.57101
+-0.364932 0.341796 -0.687064 0.527203
+-0.386505 0.317197 -0.720074 0.481138
+-0.406423 0.291239 -0.75 0.433013
+-0.424601 0.264034 -0.776715 0.383033
+-0.440961 0.235698 -0.800103 0.331414
+-0.455432 0.206354 -0.820066 0.278375
+-0.467953 0.176125 -0.836516 0.224144
+-0.47847 0.145142 -0.849385 0.168953
+-0.486938 0.113538 -0.858616 0.113039
+-0.493322 0.0814478 -0.864171 0.0566408
+-0.497592 0.0490085 -0.866025 -4.10824e-08
+-0.499732 0.0163596 -0.864171 -0.0566407
+0.499732 0.0163595 0.864171 -0.0566408
+0.497592 0.0490086 0.866025 2.36595e-09
+0.493322 0.0814477 0.864171 0.0566408
+0.486938 0.113538 0.858616 0.113039
+0.47847 0.145142 0.849385 0.168953
+0.467953 0.176125 0.836516 0.224144
+0.455432 0.206354 0.820066 0.278375
+0.440961 0.235698 0.800103 0.331414
+0.424601 0.264034 0.776715 0.383033
+0.406423 0.291239 0.75 0.433013
+0.386505 0.317197 0.720074 0.481138
+0.364932 0.341796 0.687064 0.527203
+0.341796 0.364932 0.651112 0.57101
+0.317197 0.386505 0.612372 0.612372
+0.291239 0.406423 0.57101 0.651112
+0.264034 0.424601 0.527203 0.687064
+0.235698 0.440961 0.481138 0.720074
+0.206353 0.455432 0.433013 0.75
+0.176125 0.467953 0.383033 0.776715
+0.145142 0.47847 0.331414 0.800103
+0.113538 0.486938 0.278375 0.820066
+0.0814477 0.493322 0.224144 0.836516
+0.0490085 0.497592 0.168953 0.849385
+0.0163595 0.499732 0.113039 0.858616
+-0.0163596 0.499732 0.0566407 0.864171
+-0.0490086 0.497592 -7.24831e-08 0.866025
+-0.0814478 0.493322 -0.0566408 0.864171
+-0.113538 0.486938 -0.113039 0.858616
+-0.145142 0.47847 -0.168953 0.849385
+-0.176125 0.467953 -0.224144 0.836516
+-0.206354 0.455432 -0.278375 0.820066
+-0.235698 0.440961 -0.331413 0.800103
+-0.264034 0.424601 -0.383033 0.776715
+-0.291239 0.406423 -0.433013 0.75
+-0.317197 0.386505 -0.481138 0.720074
+-0.341796 0.364932 -0.527203 0.687064
+-0.364932 0.341796 -0.57101 0.651112
+-0.386505 0.317197 -0.612372 0.612372
+-0.406423 0.291239 -0.651112 0.57101
+-0.424601 0.264034 -0.687064 0.527203
+-0.440961 0.235698 -0.720074 0.481138
+-0.455432 0.206354 -0.75 0.433013
+-0.467953 0.176125 -0.776715 0.383033
+-0.47847 0.145142 -0.800103 0.331414
+-0.486938 0.113538 -0.820066 0.278375
+-0.493322 0.0814478 -0.836516 0.224144
+-0.497592 0.0490085 -0.849385 0.168953
+-0.499732 0.0163596 -0.858616 0.113039
+0.539773 0.0176703 0.841175 0.0275372
+0.537461 0.0529353 0.837573 0.0824937
+0.532848 0.0879736 0.830384 0.137097
+0.525954 0.122635 0.81964 0.191113
+0.516807 0.156772 0.805385 0.244311
+0.505447 0.190237 0.787682 0.296463
+0.491923 0.222887 0.766606 0.347345
+0.476292 0.254583 0.742247 0.396739
+0.458622 0.285189 0.71471 0.444435
+0.438987 0.314574 0.684112 0.490228
+0.417473 0.342612 0.650585 0.533921
+0.394172 0.369182 0.614272 0.575329
+0.369182 0.394172 0.575329 0.614272
+0.342612 0.417473 0.533922 0.650585
+0.314574 0.438987 0.490228 0.684112
+0.285189 0.458622 0.444435 0.71471
+0.254583 0.476292 0.396739 0.742247
+0.222887 0.491923 0.347345 0.766606
+0.190237 0.505447 0.296463 0.787682
+0.156772 0.516807 0.244311 0.805385
+0.122635 0.525954 0.191113 0.81964
+0.0879735 0.532848 0.137097 0.830384
+0.0529352 0.537461 0.0824936 0.837573
+0.0176703 0.539773 0.0275371 0.841175
+-0.0176704 0.539773 -0.0275373 0.841175
+-0.0529354 0.537461 -0.0824938 0.837573
+-0.0879736 0.532848 -0.137097 0.830384
+-0.122635 0.525954 -0.191113 0.81964
+-0.156772 0.516807 -0.244311 0.805385
+-0.190237 0.505447 -0.296463 0.787682
+-0.222887 0.491923 -0.347345 0.766606
+-0.254583 0.476292 -0.396739 0.742247
+-0.285189 0.458622 -0.444435 0.71471
+-0.314574 0.438987 -0.490228 0.684112
+-0.342612 0.417473 -0.533921 0.650585
+-0.369182 0.394172 -0.575329 0.614272
+-0.394172 0.369182 -0.614272 0.575329
+-0.417473 0.342612 -0.650585 0.533921
+-0.438987 0.314574 -0.684112 0.490228
+-0.458622 0.285189 -0.71471 0.444435
+-0.476292 0.254583 -0.742247 0.39674
+-0.491923 0.222887 -0.766606 0.347345
+-0.505447 0.190237 -0.787682 0.296463
+-0.516807 0.156772 -0.805385 0.244311
+-0.525954 0.122635 -0.81964 0.191113
+-0.532848 0.0879736 -0.830384 0.137097
+-0.537461 0.0529353 -0.837573 0.0824937
+-0.539773 0.0176704 -0.841175 0.0275373
+0.539773 0.0176703 0.81964 0.191113
+0.537461 0.0529353 0.805385 0.244311
+0.532848 0.0879736 0.787682 0.296463
+0.525954 0.122635 0.766606 0.347345
+0.516807 0.156772 0.742247 0.396739
+0.505447 0.190237 0.71471 0.444435
+0.491923 0.222887 0.684112 0.490228
+0.476292 0.254583 0.650585 0.533922
+0.458622 0.285189 0.614272 0.575329
+0.438987 0.314574 0.575329 0.614272
+0.417473 0.342612 0.533922 0.650585
+0.394172 0.369182 0.490228 0.684112
+0.369182 0.394172 0.444435 0.71471
+0.342612 0.417473 0.396739 0.742247
+0.314574 0.438987 0.347345 0.766606
+0.285189 0.458622 0.296463 0.787682
+0.254583 0.476292 0.244311 0.805385
+0.222887 0.491923 0.191113 0.81964
+0.190237 0.505447 0.137097 0.830384
+0.156772 0.516807 0.0824937 0.837573
+0.122635 0.525954 0.0275372 0.841175
+0.0879735 0.532848 -0.0275373 0.841175
+0.0529352 0.537461 -0.0824938 0.837573
+0.0176703 0.539773 -0.137097 0.830384
+-0.0176704 0.539773 -0.191113 0.81964
+-0.0529354 0.537461 -0.244311 0.805385
+-0.0879736 0.532848 -0.296463 0.787682
+-0.122635 0.525954 -0.347345 0.766606
+-0.156772 0.516807 -0.39674 0.742247
+-0.190237 0.505447 -0.444435 0.71471
+-0.222887 0.491923 -0.490228 0.684112
+-0.254583 0.476292 -0.533921 0.650585
+-0.285189 0.458622 -0.575329 0.614272
+-0.314574 0.438987 -0.614272 0.575329
+-0.342612 0.417473 -0.650585 0.533922
+-0.369182 0.394172 -0.684112 0.490228
+-0.394172 0.369182 -0.71471 0.444435
+-0.417473 0.342612 -0.742247 0.396739
+-0.438987 0.314574 -0.766606 0.347345
+-0.458622 0.285189 -0.787682 0.296463
+-0.476292 0.254583 -0.805385 0.244311
+-0.491923 0.222887 -0.81964 0.191113
+-0.505447 0.190237 -0.830384 0.137097
+-0.516807 0.156772 -0.837573 0.0824937
+-0.525954 0.122635 -0.841175 0.0275371
+-0.532848 0.0879736 -0.841175 -0.0275372
+-0.537461 0.0529353 -0.837573 -0.0824938
+-0.539773 0.0176704 -0.830384 -0.137097
+0.577041 0.0188904 0.773165 0.262454
+0.57457 0.0565902 0.754344 0.31246
+0.569639 0.0940477 0.732294 0.361127
+0.562268 0.131103 0.707107 0.408248
+0.55249 0.167596 0.678892 0.453621
+0.540346 0.203372 0.64777 0.497052
+0.525887 0.238277 0.613875 0.538354
+0.509177 0.272161 0.57735 0.57735
+0.490287 0.30488 0.538354 0.613875
+0.469297 0.336294 0.497052 0.64777
+0.446298 0.366267 0.453621 0.678892
+0.421387 0.394672 0.408248 0.707107
+0.394672 0.421387 0.361127 0.732294
+0.366267 0.446298 0.31246 0.754344
+0.336294 0.469297 0.262454 0.773165
+0.30488 0.490287 0.211325 0.788675
+0.272161 0.509178 0.159291 0.800808
+0.238276 0.525887 0.106574 0.809511
+0.203372 0.540346 0.0534014 0.814748
+0.167596 0.55249 -3.50817e-08 0.816497
+0.131103 0.562268 -0.0534015 0.814748
+0.0940477 0.569639 -0.106574 0.809511
+0.0565902 0.57457 -0.159291 0.800808
+0.0188903 0.577041 -0.211325 0.788675
+-0.0188904 0.577041 -0.262454 0.773165
+-0.0565903 0.57457 -0.31246 0.754344
+-0.0940478 0.569639 -0.361127 0.732294
+-0.131103 0.562268 -0.408248 0.707107
+-0.167596 0.55249 -0.453621 0.678892
+-0.203372 0.540346 -0.497052 0.64777
+-0.238277 0.525887 -0.538354 0.613875
+-0.272161 0.509178 -0.57735 0.57735
+-0.30488 0.490287 -0.613875 0.538354
+-0.336294 0.469297 -0.64777 0.497052
+-0.366267 0.446298 -0.678892 0.453621
+-0.394672 0.421387 -0.707107 0.408248
+-0.421387 0.394672 -0.732294 0.361127
+-0.446298 0.366267 -0.754344 0.31246
+-0.469297 0.336294 -0.773165 0.262454
+-0.490287 0.30488 -0.788675 0.211325
+-0.509177 0.272161 -0.800808 0.159291
+-0.525887 0.238277 -0.809511 0.106574
+-0.540346 0.203372 -0.814748 0.0534014
+-0.55249 0.167596 -0.816497 2.65621e-08
+-0.562268 0.131103 -0.814748 -0.0534015
+-0.569639 0.0940478 -0.809511 -0.106574
+-0.57457 0.0565902 -0.800808 -0.159291
+-0.577041 0.0188904 -0.788675 -0.211325
+0.577041 0.0188904 0.809511 0.106574
+0.57457 0.0565902 0.800808 0.159291
+0.569639 0.0940477 0.788675 0.211325
+0.562268 0.131103 0.773165 0.262454
+0.55249 0.167596 0.754344 0.31246
+0.540346 0.203372 0.732294 0.361127
+0.525887 0.238277 0.707107 0.408248
+0.509177 0.272161 0.678892 0.453621
+0.490287 0.30488 0.64777 0.497052
+0.469297 0.336294 0.613875 0.538354
+0.446298 0.366267 0.57735 0.57735
+0.421387 0.394672 0.538354 0.613875
+0.394672 0.421387 0.497052 0.64777
+0.366267 0.446298 0.453621 0.678892
+0.336294 0.469297 0.408248 0.707107
+0.30488 0.490287 0.361127 0.732294
+0.272161 0.509178 0.31246 0.754345
+0.238276 0.525887 0.262454 0.773165
+0.203372 0.540346 0.211325 0.788675
+0.167596 0.55249 0.159291 0.800808
+0.131103 0.562268 0.106574 0.809511
+0.0940477 0.569639 0.0534013 0.814748
+0.0565902 0.57457 -1.00377e-07 0.816497
+0.0188903 0.577041 -0.0534015 0.814748
+-0.0188904 0.577041 -0.106574 0.809511
+-0.0565903 0.57457 -0.159291 0.800808
+-0.0940478 0.569639 -0.211325 0.788675
+-0.131103 0.562268 -0.262454 0.773165
+-0.167596 0.55249 -0.31246 0.754344
+-0.203372 0.540346 -0.361127 0.732293
+-0.238277 0.525887 -0.408248 0.707107
+-0.272161 0.509178 -0.453621 0.678892
+-0.30488 0.490287 -0.497052 0.64777
+-0.336294 0.469297 -0.538354 0.613875
+-0.366267 0.446298 -0.57735 0.57735
+-0.394672 0.421387 -0.613875 0.538354
+-0.421387 0.394672 -0.64777 0.497052
+-0.446298 0.366267 -0.678892 0.453621
+-0.469297 0.336294 -0.707107 0.408248
+-0.490287 0.30488 -0.732294 0.361127
+-0.509177 0.272161 -0.754344 0.31246
+-0.525887 0.238277 -0.773165 0.262454
+-0.540346 0.203372 -0.788675 0.211325
+-0.55249 0.167596 -0.800808 0.159291
+-0.562268 0.131103 -0.809511 0.106574
+-0.569639 0.0940478 -0.814748 0.0534015
+-0.57457 0.0565902 -0.816497 -3.87329e-08
+-0.577041 0.0188904 -0.814748 -0.0534014
+0.612045 0.0200363 0.769917 0.17952
+0.609424 0.060023 0.756528 0.22949
+0.604193 0.0997527 0.739899 0.278478
+0.596375 0.139055 0.720101 0.326274
+0.586004 0.177762 0.69722 0.372672
+0.573123 0.215708 0.671353 0.417474
+0.557788 0.25273 0.642612 0.460489
+0.540064 0.28867 0.611118 0.501532
+0.520028 0.323374 0.577008 0.540427
+0.497765 0.356693 0.540427 0.577008
+0.47337 0.388485 0.501532 0.611118
+0.446949 0.418613 0.460489 0.642612
+0.418613 0.446949 0.417474 0.671353
+0.388485 0.47337 0.372672 0.69722
+0.356693 0.497765 0.326274 0.720101
+0.323374 0.520028 0.278478 0.739899
+0.28867 0.540064 0.22949 0.756528
+0.25273 0.557788 0.179519 0.769917
+0.215708 0.573123 0.12878 0.78001
+0.177762 0.586004 0.0774893 0.786763
+0.139055 0.596375 0.0258667 0.790146
+0.0997526 0.604193 -0.0258668 0.790146
+0.0600229 0.609424 -0.0774895 0.786763
+0.0200362 0.612045 -0.12878 0.78001
+-0.0200363 0.612045 -0.17952 0.769917
+-0.060023 0.609424 -0.22949 0.756528
+-0.0997527 0.604193 -0.278478 0.739899
+-0.139055 0.596375 -0.326274 0.720101
+-0.177762 0.586004 -0.372672 0.69722
+-0.215708 0.573123 -0.417474 0.671353
+-0.25273 0.557788 -0.460489 0.642612
+-0.28867 0.540064 -0.501532 0.611118
+-0.323374 0.520028 -0.540427 0.577008
+-0.356693 0.497765 -0.577008 0.540427
+-0.388485 0.47337 -0.611118 0.501532
+-0.418613 0.446949 -0.642612 0.460489
+-0.446949 0.418613 -0.671353 0.417474
+-0.47337 0.388485 -0.69722 0.372672
+-0.497765 0.356693 -0.720101 0.326274
+-0.520028 0.323374 -0.739899 0.278478
+-0.540064 0.28867 -0.756528 0.22949
+-0.557788 0.25273 -0.769917 0.17952
+-0.573123 0.215708 -0.78001 0.12878
+-0.586004 0.177762 -0.786763 0.0774894
+-0.596375 0.139055 -0.790146 0.0258666
+-0.604193 0.0997527 -0.790146 -0.0258667
+-0.609424 0.060023 -0.786763 -0.0774894
+-0.612045 0.0200363 -0.78001 -0.12878
+0.539773 0.0176703 0.830384 -0.137097
+0.537461 0.0529353 0.837573 -0.0824937
+0.532848 0.0879736 0.841175 -0.0275372
+0.525954 0.122635 0.841175 0.0275372
+0.516807 0.156772 0.837573 0.0824937
+0.505447 0.190237 0.830384 0.137097
+0.491923 0.222887 0.81964 0.191113
+0.476292 0.254583 0.805385 0.244311
+0.458622 0.285189 0.787682 0.296463
+0.438987 0.314574 0.766606 0.347345
+0.417473 0.342612 0.742247 0.396739
+0.394172 0.369182 0.71471 0.444435
+0.369182 0.394172 0.684112 0.490228
+0.342612 0.417473 0.650585 0.533922
+0.314574 0.438987 0.614272 0.575329
+0.285189 0.458622 0.575329 0.614272
+0.254583 0.476292 0.533921 0.650585
+0.222887 0.491923 0.490228 0.684112
+0.190237 0.505447 0.444435 0.71471
+0.156772 0.516807 0.396739 0.742247
+0.122635 0.525954 0.347345 0.766606
+0.0879735 0.532848 0.296462 0.787682
+0.0529352 0.537461 0.244311 0.805385
+0.0176703 0.539773 0.191113 0.81964
+-0.0176704 0.539773 0.137097 0.830384
+-0.0529354 0.537461 0.0824936 0.837573
+-0.0879736 0.532848 0.0275372 0.841175
+-0.122635 0.525954 -0.0275373 0.841175
+-0.156772 0.516807 -0.0824939 0.837573
+-0.190237 0.505447 -0.137097 0.830384
+-0.222887 0.491923 -0.191113 0.81964
+-0.254583 0.476292 -0.244311 0.805385
+-0.285189 0.458622 -0.296463 0.787682
+-0.314574 0.438987 -0.347345 0.766606
+-0.342612 0.417473 -0.396739 0.742247
+-0.369182 0.394172 -0.444435 0.71471
+-0.394172 0.369182 -0.490228 0.684112
+-0.417473 0.342612 -0.533922 0.650585
+-0.438987 0.314574 -0.575329 0.614272
+-0.458622 0.285189 -0.614272 0.575329
+-0.476292 0.254583 -0.650585 0.533922
+-0.491923 0.222887 -0.684112 0.490228
+-0.505447 0.190237 -0.71471 0.444435
+-0.516807 0.156772 -0.742247 0.396739
+-0.525954 0.122635 -0.766606 0.347345
+-0.532848 0.0879736 -0.787682 0.296463
+-0.537461 0.0529353 -0.805385 0.244311
+-0.539773 0.0176704 -0.81964 0.191113
+0.577041 0.0188904 0.814748 -0.0534014
+0.57457 0.0565902 0.816497 2.23064e-09
+0.569639 0.0940477 0.814748 0.0534014
+0.562268 0.131103 0.809511 0.106574
+0.55249 0.167596 0.800808 0.159291
+0.540346 0.203372 0.788675 0.211325
+0.525887 0.238277 0.773165 0.262454
+0.509177 0.272161 0.754344 0.31246
+0.490287 0.30488 0.732294 0.361127
+0.469297 0.336294 0.707107 0.408248
+0.446298 0.366267 0.678892 0.453621
+0.421387 0.394672 0.64777 0.497052
+0.394672 0.421387 0.613875 0.538354
+0.366267 0.446298 0.57735 0.57735
+0.336294 0.469297 0.538354 0.613875
+0.30488 0.490287 0.497052 0.64777
+0.272161 0.509178 0.453621 0.678892
+0.238276 0.525887 0.408248 0.707107
+0.203372 0.540346 0.361127 0.732294
+0.167596 0.55249 0.31246 0.754344
+0.131103 0.562268 0.262454 0.773165
+0.0940477 0.569639 0.211325 0.788675
+0.0565902 0.57457 0.15929 0.800808
+0.0188903 0.577041 0.106574 0.809511
+-0.0188904 0.577041 0.0534014 0.814748
+-0.0565903 0.57457 -6.83377e-08 0.816497
+-0.0940478 0.569639 -0.0534015 0.814748
+-0.131103 0.562268 -0.106574 0.809511
+-0.167596 0.55249 -0.159291 0.800808
+-0.203372 0.540346 -0.211325 0.788675
+-0.238277 0.525887 -0.262454 0.773165
+-0.272161 0.509178 -0.31246 0.754345
+-0.30488 0.490287 -0.361127 0.732294
+-0.336294 0.469297 -0.408248 0.707107
+-0.366267 0.446298 -0.453621 0.678892
+-0.394672 0.421387 -0.497052 0.64777
+-0.421387 0.394672 -0.538354 0.613875
+-0.446298 0.366267 -0.57735 0.57735
+-0.469297 0.336294 -0.613875 0.538354
+-0.490287 0.30488 -0.64777 0.497052
+-0.509177 0.272161 -0.678892 0.453621
+-0.525887 0.238277 -0.707107 0.408248
+-0.540346 0.203372 -0.732294 0.361127
+-0.55249 0.167596 -0.754344 0.31246
+-0.562268 0.131103 -0.773165 0.262454
+-0.569639 0.0940478 -0.788675 0.211325
+-0.57457 0.0565902 -0.800808 0.159291
+-0.577041 0.0188904 -0.809511 0.106574
+0.577041 0.0188904 0.788675 -0.211325
+0.57457 0.0565902 0.800808 -0.159291
+0.569639 0.0940477 0.809511 -0.106574
+0.562268 0.131103 0.814748 -0.0534014
+0.55249 0.167596 0.816497 6.08539e-10
+0.540346 0.203372 0.814748 0.0534014
+0.525887 0.238277 0.809511 0.106574
+0.509177 0.272161 0.800808 0.159291
+0.490287 0.30488 0.788675 0.211325
+0.469297 0.336294 0.773165 0.262454
+0.446298 0.366267 0.754344 0.31246
+0.421387 0.394672 0.732294 0.361127
+0.394672 0.421387 0.707107 0.408248
+0.366267 0.446298 0.678892 0.453621
+0.336294 0.469297 0.64777 0.497052
+0.30488 0.490287 0.613875 0.538354
+0.272161 0.509178 0.57735 0.57735
+0.238276 0.525887 0.538354 0.613875
+0.203372 0.540346 0.497052 0.64777
+0.167596 0.55249 0.453621 0.678892
+0.131103 0.562268 0.408248 0.707107
+0.0940477 0.569639 0.361127 0.732294
+0.0565902 0.57457 0.31246 0.754345
+0.0188903 0.577041 0.262454 0.773165
+-0.0188904 0.577041 0.211325 0.788675
+-0.0565903 0.57457 0.159291 0.800808
+-0.0940478 0.569639 0.106574 0.809511
+-0.131103 0.562268 0.0534014 0.814748
+-0.167596 0.55249 -1.33633e-07 0.816497
+-0.203372 0.540346 -0.0534016 0.814748
+-0.238277 0.525887 -0.106574 0.809511
+-0.272161 0.509178 -0.15929 0.800808
+-0.30488 0.490287 -0.211325 0.788675
+-0.336294 0.469297 -0.262454 0.773165
+-0.366267 0.446298 -0.31246 0.754344
+-0.394672 0.421387 -0.361127 0.732294
+-0.421387 0.394672 -0.408248 0.707107
+-0.446298 0.366267 -0.453621 0.678892
+-0.469297 0.336294 -0.497052 0.64777
+-0.490287 0.30488 -0.538354 0.613875
+-0.509177 0.272161 -0.57735 0.57735
+-0.525887 0.238277 -0.613875 0.538354
+-0.540346 0.203372 -0.64777 0.497052
+-0.55249 0.167596 -0.678892 0.453621
+-0.562268 0.131103 -0.707107 0.408248
+-0.569639 0.0940478 -0.732294 0.361127
+-0.57457 0.0565902 -0.754344 0.31246
+-0.577041 0.0188904 -0.773165 0.262454
+0.612045 0.0200363 0.78001 -0.12878
+0.609424 0.060023 0.786763 -0.0774894
+0.604193 0.0997527 0.790146 -0.0258667
+0.596375 0.139055 0.790146 0.0258667
+0.586004 0.177762 0.786763 0.0774894
+0.573123 0.215708 0.78001 0.12878
+0.557788 0.25273 0.769917 0.17952
+0.540064 0.28867 0.756528 0.22949
+0.520028 0.323374 0.739899 0.278478
+0.497765 0.356693 0.720101 0.326274
+0.47337 0.388485 0.69722 0.372672
+0.446949 0.418613 0.671353 0.417474
+0.418613 0.446949 0.642612 0.460489
+0.388485 0.47337 0.611118 0.501532
+0.356693 0.497765 0.577008 0.540427
+0.323374 0.520028 0.540427 0.577008
+0.28867 0.540064 0.501532 0.611118
+0.25273 0.557788 0.460489 0.642612
+0.215708 0.573123 0.417474 0.671353
+0.177762 0.586004 0.372672 0.69722
+0.139055 0.596375 0.326274 0.720101
+0.0997526 0.604193 0.278478 0.739899
+0.0600229 0.609424 0.22949 0.756528
+0.0200362 0.612045 0.179519 0.769917
+-0.0200363 0.612045 0.12878 0.78001
+-0.060023 0.609424 0.0774893 0.786763
+-0.0997527 0.604193 0.0258667 0.790146
+-0.139055 0.596375 -0.0258668 0.790146
+-0.177762 0.586004 -0.0774895 0.786763
+-0.215708 0.573123 -0.12878 0.78001
+-0.25273 0.557788 -0.17952 0.769917
+-0.28867 0.540064 -0.22949 0.756528
+-0.323374 0.520028 -0.278478 0.739899
+-0.356693 0.497765 -0.326274 0.720101
+-0.388485 0.47337 -0.372672 0.69722
+-0.418613 0.446949 -0.417474 0.671353
+-0.446949 0.418613 -0.460489 0.642612
+-0.47337 0.388485 -0.501532 0.611118
+-0.497765 0.356693 -0.540427 0.577008
+-0.520028 0.323374 -0.577008 0.540427
+-0.540064 0.28867 -0.611118 0.501532
+-0.557788 0.25273 -0.642612 0.460489
+-0.573123 0.215708 -0.671353 0.417474
+-0.586004 0.177762 -0.69722 0.372672
+-0.596375 0.139055 -0.720101 0.326274
+-0.604193 0.0997527 -0.739899 0.278478
+-0.609424 0.060023 -0.756528 0.22949
+-0.612045 0.0200363 -0.769917 0.17952
+0.612045 0.0200363 0.790146 0.0258667
+0.609424 0.060023 0.786763 0.0774894
+0.604193 0.0997527 0.78001 0.12878
+0.596375 0.139055 0.769917 0.17952
+0.586004 0.177762 0.756528 0.22949
+0.573123 0.215708 0.739899 0.278478
+0.557788 0.25273 0.720101 0.326274
+0.540064 0.28867 0.69722 0.372672
+0.520028 0.323374 0.671353 0.417474
+0.497765 0.356693 0.642612 0.460489
+0.47337 0.388485 0.611118 0.501532
+0.446949 0.418613 0.577008 0.540427
+0.418613 0.446949 0.540427 0.577008
+0.388485 0.47337 0.501532 0.611118
+0.356693 0.497765 0.460489 0.642612
+0.323374 0.520028 0.417474 0.671353
+0.28867 0.540064 0.372672 0.69722
+0.25273 0.557788 0.326274 0.720101
+0.215708 0.573123 0.278478 0.739899
+0.177762 0.586004 0.22949 0.756528
+0.139055 0.596375 0.17952 0.769917
+0.0997526 0.604193 0.12878 0.78001
+0.0600229 0.609424 0.0774893 0.786763
+0.0200362 0.612045 0.0258666 0.790146
+-0.0200363 0.612045 -0.0258668 0.790146
+-0.060023 0.609424 -0.0774894 0.786763
+-0.0997527 0.604193 -0.12878 0.78001
+-0.139055 0.596375 -0.17952 0.769917
+-0.177762 0.586004 -0.22949 0.756528
+-0.215708 0.573123 -0.278478 0.739899
+-0.25273 0.557788 -0.326274 0.720101
+-0.28867 0.540064 -0.372672 0.69722
+-0.323374 0.520028 -0.417474 0.671353
+-0.356693 0.497765 -0.460489 0.642612
+-0.388485 0.47337 -0.501532 0.611118
+-0.418613 0.446949 -0.540427 0.577008
+-0.446949 0.418613 -0.577008 0.540427
+-0.47337 0.388485 -0.611118 0.501532
+-0.497765 0.356693 -0.642612 0.460489
+-0.520028 0.323374 -0.671353 0.417474
+-0.540064 0.28867 -0.69722 0.372672
+-0.557788 0.25273 -0.720101 0.326274
+-0.573123 0.215708 -0.739899 0.278478
+-0.586004 0.177762 -0.756528 0.22949
+-0.596375 0.139055 -0.769917 0.179519
+-0.604193 0.0997527 -0.78001 0.12878
+-0.609424 0.060023 -0.786763 0.0774893
+-0.612045 0.0200363 -0.790146 0.0258668
+0.645152 0.0211201 0.757229 0.099691
+0.642389 0.0632698 0.749087 0.149003
+0.636876 0.105149 0.737738 0.197676
+0.628635 0.146577 0.72323 0.245503
+0.617702 0.187378 0.705625 0.292279
+0.604125 0.227376 0.684998 0.337804
+0.58796 0.266401 0.661438 0.381881
+0.569278 0.304285 0.635045 0.424324
+0.548158 0.340866 0.605934 0.464949
+0.52469 0.375988 0.574227 0.503584
+0.498976 0.409499 0.540062 0.540062
+0.471125 0.441257 0.503584 0.574227
+0.441257 0.471125 0.464949 0.605934
+0.409499 0.498976 0.424324 0.635045
+0.375988 0.52469 0.381881 0.661438
+0.340866 0.548158 0.337804 0.684998
+0.304285 0.569278 0.292279 0.705625
+0.266401 0.58796 0.245503 0.72323
+0.227376 0.604125 0.197676 0.737738
+0.187378 0.617702 0.149003 0.749087
+0.146577 0.628635 0.099691 0.757229
+0.105148 0.636876 0.0499524 0.762127
+0.0632697 0.642389 -9.38938e-08 0.763763
+0.02112 0.645152 -0.0499525 0.762127
+-0.0211201 0.645152 -0.0996911 0.757229
+-0.0632698 0.642389 -0.149003 0.749087
+-0.105149 0.636876 -0.197676 0.737738
+-0.146577 0.628635 -0.245503 0.72323
+-0.187378 0.617702 -0.292279 0.705625
+-0.227377 0.604125 -0.337804 0.684998
+-0.266401 0.58796 -0.381881 0.661438
+-0.304285 0.569278 -0.424324 0.635045
+-0.340866 0.548158 -0.464949 0.605934
+-0.375988 0.52469 -0.503584 0.574227
+-0.409499 0.498976 -0.540062 0.540062
+-0.441257 0.471125 -0.574227 0.503584
+-0.471125 0.441257 -0.605934 0.464949
+-0.498976 0.409499 -0.635045 0.424324
+-0.52469 0.375988 -0.661438 0.381881
+-0.548158 0.340866 -0.684998 0.337804
+-0.569278 0.304285 -0.705625 0.292279
+-0.58796 0.266401 -0.72323 0.245503
+-0.604125 0.227376 -0.737738 0.197676
+-0.617702 0.187378 -0.749087 0.149003
+-0.628635 0.146577 -0.757229 0.099691
+-0.636876 0.105149 -0.762127 0.0499525
+-0.642389 0.0632698 -0.763763 -3.62313e-08
+-0.645152 0.0211201 -0.762127 -0.0499524
+0.645152 0.0211201 0.762127 -0.0499525
+0.642389 0.0632698 0.763763 2.08657e-09
+0.636876 0.105149 0.762127 0.0499525
+0.628635 0.146577 0.757229 0.099691
+0.617702 0.187378 0.749087 0.149003
+0.604125 0.227376 0.737738 0.197676
+0.58796 0.266401 0.72323 0.245503
+0.569278 0.304285 0.705625 0.292279
+0.548158 0.340866 0.684998 0.337804
+0.52469 0.375988 0.661438 0.381881
+0.498976 0.409499 0.635045 0.424324
+0.471125 0.441257 0.605934 0.464949
+0.441257 0.471125 0.574227 0.503584
+0.409499 0.498976 0.540062 0.540062
+0.375988 0.52469 0.503584 0.574227
+0.340866 0.548158 0.464949 0.605934
+0.304285 0.569278 0.424324 0.635045
+0.266401 0.58796 0.381881 0.661438
+0.227376 0.604125 0.337804 0.684998
+0.187378 0.617702 0.292279 0.705625
+0.146577 0.628635 0.245503 0.72323
+0.105148 0.636876 0.197676 0.737738
+0.0632697 0.642389 0.149003 0.749087
+0.02112 0.645152 0.0996909 0.757229
+-0.0211201 0.645152 0.0499524 0.762127
+-0.0632698 0.642389 -6.39241e-08 0.763763
+-0.105149 0.636876 -0.0499525 0.762127
+-0.146577 0.628635 -0.0996911 0.757229
+-0.187378 0.617702 -0.149003 0.749087
+-0.227377 0.604125 -0.197676 0.737738
+-0.266401 0.58796 -0.245504 0.72323
+-0.304285 0.569278 -0.292279 0.705625
+-0.340866 0.548158 -0.337804 0.684998
+-0.375988 0.52469 -0.381881 0.661438
+-0.409499 0.498976 -0.424324 0.635045
+-0.441257 0.471125 -0.464949 0.605934
+-0.471125 0.441257 -0.503584 0.574227
+-0.498976 0.409499 -0.540062 0.540062
+-0.52469 0.375988 -0.574227 0.503584
+-0.548158 0.340866 -0.605934 0.464949
+-0.569278 0.304285 -0.635045 0.424324
+-0.58796 0.266401 -0.661438 0.381881
+-0.604125 0.227376 -0.684998 0.337803
+-0.617702 0.187378 -0.705625 0.292279
+-0.628635 0.146577 -0.72323 0.245503
+-0.636876 0.105149 -0.737738 0.197676
+-0.642389 0.0632698 -0.749087 0.149003
+-0.645152 0.0211201 -0.757229 0.0996911
+0.676641 0.0221509 0.735586 0.0240806
+0.673743 0.0663579 0.732436 0.0721387
+0.667961 0.110281 0.72615 0.119888
+0.659318 0.153731 0.716754 0.167124
+0.647852 0.196524 0.704289 0.213644
+0.633611 0.238474 0.688808 0.259249
+0.616658 0.279404 0.670378 0.303744
+0.597064 0.319137 0.649076 0.346939
+0.574913 0.357504 0.624996 0.388647
+0.5503 0.394339 0.598239 0.428692
+0.523331 0.429486 0.56892 0.466901
+0.49412 0.462794 0.537165 0.50311
+0.462794 0.49412 0.50311 0.537165
+0.429486 0.523331 0.466901 0.56892
+0.394339 0.5503 0.428692 0.598239
+0.357504 0.574913 0.388647 0.624996
+0.319137 0.597064 0.346939 0.649077
+0.279404 0.616658 0.303744 0.670378
+0.238474 0.633611 0.259249 0.688808
+0.196524 0.647852 0.213644 0.704289
+0.153731 0.659318 0.167124 0.716754
+0.110281 0.667961 0.119888 0.72615
+0.0663578 0.673743 0.0721386 0.732436
+0.0221508 0.676641 0.0240805 0.735586
+-0.022151 0.676641 -0.0240807 0.735586
+-0.066358 0.673743 -0.0721387 0.732436
+-0.110281 0.667961 -0.119888 0.72615
+-0.153731 0.659318 -0.167124 0.716754
+-0.196524 0.647852 -0.213644 0.704289
+-0.238475 0.633611 -0.259249 0.688808
+-0.279404 0.616658 -0.303744 0.670378
+-0.319137 0.597064 -0.346939 0.649077
+-0.357504 0.574913 -0.388647 0.624996
+-0.394339 0.5503 -0.428692 0.598239
+-0.429486 0.523331 -0.466901 0.56892
+-0.462794 0.49412 -0.50311 0.537165
+-0.49412 0.462794 -0.537165 0.50311
+-0.523331 0.429486 -0.56892 0.466901
+-0.5503 0.394339 -0.598239 0.428692
+-0.574913 0.357504 -0.624996 0.388647
+-0.597063 0.319137 -0.649076 0.346939
+-0.616658 0.279404 -0.670378 0.303744
+-0.633611 0.238474 -0.688808 0.259249
+-0.647852 0.196524 -0.704289 0.213644
+-0.659318 0.153731 -0.716754 0.167124
+-0.667961 0.110281 -0.72615 0.119888
+-0.673743 0.0663579 -0.732436 0.0721386
+-0.676641 0.022151 -0.735586 0.0240807
+0.612045 0.0200363 0.720101 0.326274
+0.609424 0.060023 0.69722 0.372672
+0.604193 0.0997527 0.671353 0.417474
+0.596375 0.139055 0.642612 0.460489
+0.586004 0.177762 0.611118 0.501532
+0.573123 0.215708 0.577008 0.540427
+0.557788 0.25273 0.540427 0.577008
+0.540064 0.28867 0.501532 0.611118
+0.520028 0.323374 0.460489 0.642612
+0.497765 0.356693 0.417474 0.671353
+0.47337 0.388485 0.372672 0.69722
+0.446949 0.418613 0.326274 0.720101
+0.418613 0.446949 0.278478 0.739899
+0.388485 0.47337 0.22949 0.756528
+0.356693 0.497765 0.17952 0.769917
+0.323374 0.520028 0.12878 0.78001
+0.28867 0.540064 0.0774893 0.786763
+0.25273 0.557788 0.0258667 0.790146
+0.215708 0.573123 -0.0258668 0.790146
+0.177762 0.586004 -0.0774894 0.786763
+0.139055 0.596375 -0.12878 0.78001
+0.0997526 0.604193 -0.17952 0.769917
+0.0600229 0.609424 -0.22949 0.756528
+0.0200362 0.612045 -0.278478 0.739899
+-0.0200363 0.612045 -0.326274 0.720101
+-0.060023 0.609424 -0.372672 0.69722
+-0.0997527 0.604193 -0.417474 0.671353
+-0.139055 0.596375 -0.460489 0.642612
+-0.177762 0.586004 -0.501532 0.611118
+-0.215708 0.573123 -0.540427 0.577008
+-0.25273 0.557788 -0.577008 0.540427
+-0.28867 0.540064 -0.611118 0.501532
+-0.323374 0.520028 -0.642612 0.460489
+-0.356693 0.497765 -0.671353 0.417474
+-0.388485 0.47337 -0.69722 0.372672
+-0.418613 0.446949 -0.720101 0.326274
+-0.446949 0.418613 -0.739899 0.278478
+-0.47337 0.388485 -0.756528 0.22949
+-0.497765 0.356693 -0.769917 0.17952
+-0.520028 0.323374 -0.78001 0.12878
+-0.540064 0.28867 -0.786763 0.0774894
+-0.557788 0.25273 -0.790146 0.0258667
+-0.573123 0.215708 -0.790146 -0.0258668
+-0.586004 0.177762 -0.786763 -0.0774893
+-0.596375 0.139055 -0.78001 -0.12878
+-0.604193 0.0997527 -0.769917 -0.17952
+-0.609424 0.060023 -0.756528 -0.22949
+-0.612045 0.0200363 -0.739899 -0.278478
+0.645152 0.0211201 0.661438 0.381881
+0.642389 0.0632698 0.635045 0.424324
+0.636876 0.105149 0.605934 0.464949
+0.628635 0.146577 0.574227 0.503584
+0.617702 0.187378 0.540062 0.540062
+0.604125 0.227376 0.503584 0.574227
+0.58796 0.266401 0.464949 0.605934
+0.569278 0.304285 0.424324 0.635045
+0.548158 0.340866 0.381881 0.661438
+0.52469 0.375988 0.337804 0.684998
+0.498976 0.409499 0.292279 0.705625
+0.471125 0.441257 0.245503 0.72323
+0.441257 0.471125 0.197676 0.737738
+0.409499 0.498976 0.149003 0.749087
+0.375988 0.52469 0.099691 0.757229
+0.340866 0.548158 0.0499524 0.762127
+0.304285 0.569278 -6.27856e-08 0.763763
+0.266401 0.58796 -0.0499525 0.762127
+0.227376 0.604125 -0.0996911 0.757229
+0.187378 0.617702 -0.149003 0.749087
+0.146577 0.628635 -0.197676 0.737738
+0.105148 0.636876 -0.245504 0.72323
+0.0632697 0.642389 -0.292279 0.705625
+0.02112 0.645152 -0.337804 0.684998
+-0.0211201 0.645152 -0.381881 0.661438
+-0.0632698 0.642389 -0.424324 0.635045
+-0.105149 0.636876 -0.464949 0.605934
+-0.146577 0.628635 -0.503584 0.574227
+-0.187378 0.617702 -0.540062 0.540062
+-0.227377 0.604125 -0.574227 0.503584
+-0.266401 0.58796 -0.605934 0.464949
+-0.304285 0.569278 -0.635045 0.424324
+-0.340866 0.548158 -0.661438 0.381881
+-0.375988 0.52469 -0.684998 0.337803
+-0.409499 0.498976 -0.705625 0.292279
+-0.441257 0.471125 -0.72323 0.245503
+-0.471125 0.441257 -0.737738 0.197676
+-0.498976 0.409499 -0.749087 0.149003
+-0.52469 0.375988 -0.757229 0.0996911
+-0.548158 0.340866 -0.762127 0.0499524
+-0.569278 0.304285 -0.763763 8.59245e-08
+-0.58796 0.266401 -0.762127 -0.0499525
+-0.604125 0.227376 -0.757229 -0.0996911
+-0.617702 0.187378 -0.749087 -0.149003
+-0.628635 0.146577 -0.737738 -0.197676
+-0.636876 0.105149 -0.72323 -0.245503
+-0.642389 0.0632698 -0.705625 -0.292279
+-0.645152 0.0211201 -0.684998 -0.337804
+0.645152 0.0211201 0.72323 0.245503
+0.642389 0.0632698 0.705625 0.292279
+0.636876 0.105149 0.684998 0.337804
+0.628635 0.146577 0.661438 0.381881
+0.617702 0.187378 0.635045 0.424324
+0.604125 0.227376 0.605934 0.464949
+0.58796 0.266401 0.574227 0.503584
+0.569278 0.304285 0.540062 0.540062
+0.548158 0.340866 0.503584 0.574227
+0.52469 0.375988 0.464949 0.605934
+0.498976 0.409499 0.424324 0.635045
+0.471125 0.441257 0.381881 0.661438
+0.441257 0.471125 0.337804 0.684998
+0.409499 0.498976 0.292279 0.705625
+0.375988 0.52469 0.245503 0.72323
+0.340866 0.548158 0.197676 0.737738
+0.304285 0.569278 0.149003 0.749087
+0.266401 0.58796 0.099691 0.757229
+0.227376 0.604125 0.0499524 0.762127
+0.187378 0.617702 -3.28159e-08 0.763763
+0.146577 0.628635 -0.0499525 0.762127
+0.105148 0.636876 -0.0996911 0.757229
+0.0632697 0.642389 -0.149003 0.749087
+0.02112 0.645152 -0.197676 0.737738
+-0.0211201 0.645152 -0.245504 0.72323
+-0.0632698 0.642389 -0.292279 0.705625
+-0.105149 0.636876 -0.337804 0.684998
+-0.146577 0.628635 -0.381881 0.661438
+-0.187378 0.617702 -0.424324 0.635045
+-0.227377 0.604125 -0.464949 0.605934
+-0.266401 0.58796 -0.503584 0.574227
+-0.304285 0.569278 -0.540062 0.540062
+-0.340866 0.548158 -0.574227 0.503584
+-0.375988 0.52469 -0.605934 0.464949
+-0.409499 0.498976 -0.635045 0.424324
+-0.441257 0.471125 -0.661438 0.381881
+-0.471125 0.441257 -0.684998 0.337804
+-0.498976 0.409499 -0.705625 0.292279
+-0.52469 0.375988 -0.72323 0.245504
+-0.548158 0.340866 -0.737738 0.197676
+-0.569278 0.304285 -0.749087 0.149003
+-0.58796 0.266401 -0.757229 0.099691
+-0.604125 0.227376 -0.762127 0.0499524
+-0.617702 0.187378 -0.763763 2.48466e-08
+-0.628635 0.146577 -0.762127 -0.0499525
+-0.636876 0.105149 -0.757229 -0.099691
+-0.642389 0.0632698 -0.749087 -0.149003
+-0.645152 0.0211201 -0.737738 -0.197676
+0.676641 0.0221509 0.670378 0.303744
+0.673743 0.0663579 0.649076 0.346939
+0.667961 0.110281 0.624996 0.388647
+0.659318 0.153731 0.598239 0.428692
+0.647852 0.196524 0.56892 0.466901
+0.633611 0.238474 0.537165 0.50311
+0.616658 0.279404 0.50311 0.537165
+0.597064 0.319137 0.466901 0.56892
+0.574913 0.357504 0.428692 0.598239
+0.5503 0.394339 0.388647 0.624996
+0.523331 0.429486 0.346939 0.649076
+0.49412 0.462794 0.303744 0.670378
+0.462794 0.49412 0.259249 0.688808
+0.429486 0.523331 0.213644 0.704289
+0.394339 0.5503 0.167124 0.716754
+0.357504 0.574913 0.119888 0.72615
+0.319137 0.597064 0.0721386 0.732436
+0.279404 0.616658 0.0240805 0.735586
+0.238474 0.633611 -0.0240806 0.735586
+0.196524 0.647852 -0.0721387 0.732436
+0.153731 0.659318 -0.119888 0.72615
+0.110281 0.667961 -0.167124 0.716754
+0.0663578 0.673743 -0.213644 0.704289
+0.0221508 0.676641 -0.259249 0.688808
+-0.022151 0.676641 -0.303744 0.670378
+-0.066358 0.673743 -0.346939 0.649076
+-0.110281 0.667961 -0.388647 0.624996
+-0.153731 0.659318 -0.428692 0.598239
+-0.196524 0.647852 -0.466901 0.56892
+-0.238475 0.633611 -0.50311 0.537165
+-0.279404 0.616658 -0.537165 0.50311
+-0.319137 0.597064 -0.56892 0.466901
+-0.357504 0.574913 -0.598239 0.428692
+-0.394339 0.5503 -0.624996 0.388647
+-0.429486 0.523331 -0.649076 0.346939
+-0.462794 0.49412 -0.670378 0.303744
+-0.49412 0.462794 -0.688808 0.259249
+-0.523331 0.429486 -0.704289 0.213644
+-0.5503 0.394339 -0.716754 0.167124
+-0.574913 0.357504 -0.72615 0.119888
+-0.597063 0.319137 -0.732436 0.0721387
+-0.616658 0.279404 -0.735586 0.0240806
+-0.633611 0.238474 -0.735586 -0.0240807
+-0.647852 0.196524 -0.732436 -0.0721386
+-0.659318 0.153731 -0.72615 -0.119888
+-0.667961 0.110281 -0.716754 -0.167124
+-0.673743 0.0663579 -0.704289 -0.213644
+-0.676641 0.022151 -0.688808 -0.259249
+0.676641 0.0221509 0.598239 0.428692
+0.673743 0.0663579 0.56892 0.466901
+0.667961 0.110281 0.537165 0.50311
+0.659318 0.153731 0.50311 0.537165
+0.647852 0.196524 0.466901 0.56892
+0.633611 0.238474 0.428692 0.598239
+0.616658 0.279404 0.388647 0.624996
+0.597064 0.319137 0.346939 0.649076
+0.574913 0.357504 0.303744 0.670378
+0.5503 0.394339 0.259249 0.688808
+0.523331 0.429486 0.213644 0.704289
+0.49412 0.462794 0.167124 0.716754
+0.462794 0.49412 0.119888 0.72615
+0.429486 0.523331 0.0721387 0.732436
+0.394339 0.5503 0.0240806 0.735586
+0.357504 0.574913 -0.0240807 0.735586
+0.319137 0.597064 -0.0721387 0.732436
+0.279404 0.616658 -0.119888 0.72615
+0.238474 0.633611 -0.167124 0.716754
+0.196524 0.647852 -0.213644 0.704289
+0.153731 0.659318 -0.259249 0.688808
+0.110281 0.667961 -0.303744 0.670378
+0.0663578 0.673743 -0.346939 0.649076
+0.0221508 0.676641 -0.388647 0.624996
+-0.022151 0.676641 -0.428692 0.598239
+-0.066358 0.673743 -0.466901 0.56892
+-0.110281 0.667961 -0.50311 0.537165
+-0.153731 0.659318 -0.537165 0.50311
+-0.196524 0.647852 -0.56892 0.466901
+-0.238475 0.633611 -0.598239 0.428692
+-0.279404 0.616658 -0.624996 0.388647
+-0.319137 0.597064 -0.649076 0.346939
+-0.357504 0.574913 -0.670378 0.303744
+-0.394339 0.5503 -0.688808 0.259249
+-0.429486 0.523331 -0.704289 0.213644
+-0.462794 0.49412 -0.716754 0.167124
+-0.49412 0.462794 -0.72615 0.119888
+-0.523331 0.429486 -0.732436 0.0721386
+-0.5503 0.394339 -0.735586 0.0240807
+-0.574913 0.357504 -0.735586 -0.0240806
+-0.597063 0.319137 -0.732436 -0.0721386
+-0.616658 0.279404 -0.72615 -0.119888
+-0.633611 0.238474 -0.716754 -0.167124
+-0.647852 0.196524 -0.704289 -0.213644
+-0.659318 0.153731 -0.688808 -0.259249
+-0.667961 0.110281 -0.670378 -0.303744
+-0.673743 0.0663579 -0.649076 -0.346939
+-0.676641 0.022151 -0.624996 -0.388647
+0.706728 0.0231359 0.531631 0.466228
+0.703702 0.0693086 0.5 0.5
+0.697662 0.115184 0.466228 0.531631
+0.688635 0.160567 0.430459 0.560986
+0.676659 0.205262 0.392847 0.587938
+0.661785 0.249078 0.353553 0.612372
+0.644078 0.291828 0.312745 0.634185
+0.623612 0.333328 0.270598 0.653281
+0.600477 0.3734 0.227292 0.669581
+0.574769 0.411874 0.183013 0.683013
+0.546601 0.448584 0.13795 0.69352
+0.516092 0.483373 0.0922959 0.701057
+0.483373 0.516092 0.046247 0.705593
+0.448584 0.546601 -1.58103e-09 0.707107
+0.411874 0.574769 -0.046247 0.705593
+0.3734 0.600477 -0.092296 0.701057
+0.333328 0.623613 -0.13795 0.69352
+0.291828 0.644078 -0.183013 0.683013
+0.249078 0.661785 -0.227292 0.669581
+0.205262 0.676659 -0.270598 0.653281
+0.160567 0.688635 -0.312745 0.634185
+0.115184 0.697662 -0.353553 0.612372
+0.0693085 0.703702 -0.392848 0.587938
+0.0231358 0.706728 -0.430459 0.560985
+-0.023136 0.706728 -0.466228 0.531631
+-0.0693086 0.703702 -0.5 0.5
+-0.115185 0.697662 -0.531631 0.466228
+-0.160567 0.688635 -0.560986 0.430459
+-0.205262 0.676659 -0.587938 0.392847
+-0.249078 0.661785 -0.612372 0.353553
+-0.291828 0.644078 -0.634185 0.312745
+-0.333328 0.623613 -0.653281 0.270598
+-0.3734 0.600477 -0.669581 0.227292
+-0.411874 0.574769 -0.683013 0.183013
+-0.448584 0.546601 -0.69352 0.13795
+-0.483373 0.516092 -0.701057 0.0922959
+-0.516092 0.483373 -0.705593 0.046247
+-0.546601 0.448584 -0.707107 -3.24897e-08
+-0.574769 0.411874 -0.705593 -0.0462469
+-0.600477 0.3734 -0.701057 -0.092296
+-0.623612 0.333328 -0.69352 -0.13795
+-0.644078 0.291828 -0.683013 -0.183013
+-0.661785 0.249078 -0.669581 -0.227292
+-0.676659 0.205262 -0.653281 -0.270598
+-0.688635 0.160567 -0.634185 -0.312745
+-0.697662 0.115185 -0.612372 -0.353553
+-0.703702 0.0693086 -0.587938 -0.392848
+-0.706728 0.0231359 -0.560986 -0.430459
+0.706728 0.0231359 0.612372 0.353553
+0.703702 0.0693086 0.587938 0.392847
+0.697662 0.115184 0.560986 0.430459
+0.688635 0.160567 0.531631 0.466228
+0.676659 0.205262 0.5 0.5
+0.661785 0.249078 0.466228 0.531631
+0.644078 0.291828 0.430459 0.560986
+0.623612 0.333328 0.392847 0.587938
+0.600477 0.3734 0.353553 0.612372
+0.574769 0.411874 0.312745 0.634185
+0.546601 0.448584 0.270598 0.653281
+0.516092 0.483373 0.227292 0.669581
+0.483373 0.516092 0.183013 0.683013
+0.448584 0.546601 0.13795 0.69352
+0.411874 0.574769 0.0922959 0.701057
+0.3734 0.600477 0.0462469 0.705593
+0.333328 0.623613 -5.81282e-08 0.707107
+0.291828 0.644078 -0.046247 0.705593
+0.249078 0.661785 -0.092296 0.701057
+0.205262 0.676659 -0.13795 0.69352
+0.160567 0.688635 -0.183013 0.683013
+0.115184 0.697662 -0.227292 0.669581
+0.0693085 0.703702 -0.270598 0.653281
+0.0231358 0.706728 -0.312745 0.634185
+-0.023136 0.706728 -0.353553 0.612372
+-0.0693086 0.703702 -0.392848 0.587938
+-0.115185 0.697662 -0.430459 0.560985
+-0.160567 0.688635 -0.466228 0.531631
+-0.205262 0.676659 -0.5 0.5
+-0.249078 0.661785 -0.531631 0.466228
+-0.291828 0.644078 -0.560986 0.430459
+-0.333328 0.623613 -0.587938 0.392848
+-0.3734 0.600477 -0.612372 0.353553
+-0.411874 0.574769 -0.634185 0.312745
+-0.448584 0.546601 -0.653281 0.270598
+-0.483373 0.516092 -0.669581 0.227292
+-0.516092 0.483373 -0.683013 0.183013
+-0.546601 0.448584 -0.69352 0.13795
+-0.574769 0.411874 -0.701057 0.092296
+-0.600477 0.3734 -0.705593 0.046247
+-0.623612 0.333328 -0.707107 7.95506e-08
+-0.644078 0.291828 -0.705593 -0.046247
+-0.661785 0.249078 -0.701057 -0.092296
+-0.676659 0.205262 -0.69352 -0.13795
+-0.688635 0.160567 -0.683013 -0.183013
+-0.697662 0.115185 -0.669581 -0.227292
+-0.703702 0.0693086 -0.653281 -0.270598
+-0.706728 0.0231359 -0.634185 -0.312745
+0.735586 0.0240806 0.5503 0.394339
+0.732436 0.0721387 0.523331 0.429486
+0.72615 0.119888 0.49412 0.462794
+0.716754 0.167124 0.462794 0.49412
+0.704289 0.213644 0.429486 0.523331
+0.688808 0.259249 0.394339 0.5503
+0.670378 0.303744 0.357504 0.574913
+0.649076 0.346939 0.319137 0.597064
+0.624996 0.388647 0.279404 0.616658
+0.598239 0.428692 0.238474 0.633611
+0.56892 0.466901 0.196524 0.647852
+0.537165 0.50311 0.153731 0.659318
+0.50311 0.537165 0.110281 0.667961
+0.466901 0.56892 0.0663579 0.673743
+0.428692 0.598239 0.0221509 0.676641
+0.388647 0.624996 -0.022151 0.676641
+0.346939 0.649077 -0.066358 0.673743
+0.303744 0.670378 -0.110281 0.667961
+0.259249 0.688808 -0.153731 0.659318
+0.213644 0.704289 -0.196524 0.647852
+0.167124 0.716754 -0.238474 0.633611
+0.119888 0.72615 -0.279404 0.616658
+0.0721386 0.732436 -0.319137 0.597063
+0.0240805 0.735586 -0.357504 0.574913
+-0.0240807 0.735586 -0.394339 0.5503
+-0.0721387 0.732436 -0.429486 0.523331
+-0.119888 0.72615 -0.462794 0.49412
+-0.167124 0.716754 -0.49412 0.462794
+-0.213644 0.704289 -0.523331 0.429486
+-0.259249 0.688808 -0.5503 0.394339
+-0.303744 0.670378 -0.574913 0.357504
+-0.346939 0.649077 -0.597063 0.319137
+-0.388647 0.624996 -0.616658 0.279404
+-0.428692 0.598239 -0.633611 0.238474
+-0.466901 0.56892 -0.647852 0.196524
+-0.50311 0.537165 -0.659318 0.153731
+-0.537165 0.50311 -0.667961 0.110281
+-0.56892 0.466901 -0.673743 0.0663579
+-0.598239 0.428692 -0.676641 0.022151
+-0.624996 0.388647 -0.676641 -0.0221509
+-0.649076 0.346939 -0.673743 -0.0663578
+-0.670378 0.303744 -0.667961 -0.110281
+-0.688808 0.259249 -0.659318 -0.153731
+-0.704289 0.213644 -0.647852 -0.196524
+-0.716754 0.167124 -0.633611 -0.238474
+-0.72615 0.119888 -0.616658 -0.279404
+-0.732436 0.0721386 -0.597064 -0.319137
+-0.735586 0.0240807 -0.574913 -0.357504
+0.676641 0.0221509 0.716754 0.167124
+0.673743 0.0663579 0.704289 0.213644
+0.667961 0.110281 0.688808 0.259249
+0.659318 0.153731 0.670378 0.303744
+0.647852 0.196524 0.649076 0.346939
+0.633611 0.238474 0.624996 0.388647
+0.616658 0.279404 0.598239 0.428692
+0.597064 0.319137 0.56892 0.466901
+0.574913 0.357504 0.537165 0.50311
+0.5503 0.394339 0.50311 0.537165
+0.523331 0.429486 0.466901 0.56892
+0.49412 0.462794 0.428692 0.598239
+0.462794 0.49412 0.388647 0.624996
+0.429486 0.523331 0.346939 0.649076
+0.394339 0.5503 0.303744 0.670378
+0.357504 0.574913 0.259249 0.688808
+0.319137 0.597064 0.213644 0.704289
+0.279404 0.616658 0.167124 0.716754
+0.238474 0.633611 0.119888 0.72615
+0.196524 0.647852 0.0721386 0.732436
+0.153731 0.659318 0.0240806 0.735586
+0.110281 0.667961 -0.0240807 0.735586
+0.0663578 0.673743 -0.0721388 0.732436
+0.0221508 0.676641 -0.119888 0.72615
+-0.022151 0.676641 -0.167124 0.716754
+-0.066358 0.673743 -0.213644 0.704289
+-0.110281 0.667961 -0.259249 0.688808
+-0.153731 0.659318 -0.303744 0.670378
+-0.196524 0.647852 -0.346939 0.649076
+-0.238475 0.633611 -0.388648 0.624996
+-0.279404 0.616658 -0.428692 0.598239
+-0.319137 0.597064 -0.466901 0.56892
+-0.357504 0.574913 -0.50311 0.537165
+-0.394339 0.5503 -0.537165 0.50311
+-0.429486 0.523331 -0.56892 0.466901
+-0.462794 0.49412 -0.598239 0.428692
+-0.49412 0.462794 -0.624996 0.388647
+-0.523331 0.429486 -0.649076 0.346939
+-0.5503 0.394339 -0.670378 0.303744
+-0.574913 0.357504 -0.688808 0.259249
+-0.597063 0.319137 -0.704289 0.213644
+-0.616658 0.279404 -0.716754 0.167124
+-0.633611 0.238474 -0.72615 0.119888
+-0.647852 0.196524 -0.732436 0.0721387
+-0.659318 0.153731 -0.735586 0.0240805
+-0.667961 0.110281 -0.735586 -0.0240805
+-0.673743 0.0663579 -0.732436 -0.0721387
+-0.676641 0.022151 -0.72615 -0.119888
+0.706728 0.0231359 0.669581 0.227292
+0.703702 0.0693086 0.653281 0.270598
+0.697662 0.115184 0.634185 0.312745
+0.688635 0.160567 0.612372 0.353553
+0.676659 0.205262 0.587938 0.392847
+0.661785 0.249078 0.560986 0.430459
+0.644078 0.291828 0.531631 0.466228
+0.623612 0.333328 0.5 0.5
+0.600477 0.3734 0.466228 0.531631
+0.574769 0.411874 0.430459 0.560986
+0.546601 0.448584 0.392847 0.587938
+0.516092 0.483373 0.353553 0.612372
+0.483373 0.516092 0.312745 0.634185
+0.448584 0.546601 0.270598 0.653281
+0.411874 0.574769 0.227292 0.669581
+0.3734 0.600477 0.183013 0.683013
+0.333328 0.623613 0.13795 0.69352
+0.291828 0.644078 0.0922959 0.701057
+0.249078 0.661785 0.046247 0.705593
+0.205262 0.676659 -3.03816e-08 0.707107
+0.160567 0.688635 -0.046247 0.705593
+0.115184 0.697662 -0.0922961 0.701057
+0.0693085 0.703702 -0.13795 0.69352
+0.0231358 0.706728 -0.183013 0.683013
+-0.023136 0.706728 -0.227292 0.669581
+-0.0693086 0.703702 -0.270598 0.653281
+-0.115185 0.697662 -0.312745 0.634185
+-0.160567 0.688635 -0.353553 0.612372
+-0.205262 0.676659 -0.392848 0.587938
+-0.249078 0.661785 -0.430459 0.560985
+-0.291828 0.644078 -0.466228 0.531631
+-0.333328 0.623613 -0.5 0.5
+-0.3734 0.600477 -0.531631 0.466228
+-0.411874 0.574769 -0.560986 0.430459
+-0.448584 0.546601 -0.587938 0.392847
+-0.483373 0.516092 -0.612372 0.353553
+-0.516092 0.483373 -0.634185 0.312745
+-0.546601 0.448584 -0.653281 0.270598
+-0.574769 0.411874 -0.669581 0.227292
+-0.600477 0.3734 -0.683013 0.183013
+-0.623612 0.333328 -0.69352 0.13795
+-0.644078 0.291828 -0.701057 0.092296
+-0.661785 0.249078 -0.705593 0.0462469
+-0.676659 0.205262 -0.707107 2.30035e-08
+-0.688635 0.160567 -0.705593 -0.046247
+-0.697662 0.115185 -0.701057 -0.0922959
+-0.703702 0.0693086 -0.69352 -0.13795
+-0.706728 0.0231359 -0.683013 -0.183013
+0.706728 0.0231359 0.701057 0.092296
+0.703702 0.0693086 0.69352 0.13795
+0.697662 0.115184 0.683013 0.183013
+0.688635 0.160567 0.669581 0.227292
+0.676659 0.205262 0.653281 0.270598
+0.661785 0.249078 0.634185 0.312745
+0.644078 0.291828 0.612372 0.353553
+0.623612 0.333328 0.587938 0.392847
+0.600477 0.3734 0.560986 0.430459
+0.574769 0.411874 0.531631 0.466228
+0.546601 0.448584 0.5 0.5
+0.516092 0.483373 0.466228 0.531631
+0.483373 0.516092 0.430459 0.560986
+0.448584 0.546601 0.392847 0.587938
+0.411874 0.574769 0.353553 0.612372
+0.3734 0.600477 0.312745 0.634185
+0.333328 0.623613 0.270598 0.653282
+0.291828 0.644078 0.227292 0.669581
+0.249078 0.661785 0.183013 0.683013
+0.205262 0.676659 0.13795 0.69352
+0.160567 0.688635 0.0922959 0.701057
+0.115184 0.697662 0.0462469 0.705593
+0.0693085 0.703702 -8.69287e-08 0.707107
+0.0231358 0.706728 -0.0462471 0.705593
+-0.023136 0.706728 -0.092296 0.701057
+-0.0693086 0.703702 -0.13795 0.69352
+-0.115185 0.697662 -0.183013 0.683013
+-0.160567 0.688635 -0.227292 0.669581
+-0.205262 0.676659 -0.270598 0.653281
+-0.249078 0.661785 -0.312745 0.634185
+-0.291828 0.644078 -0.353553 0.612372
+-0.333328 0.623613 -0.392847 0.587938
+-0.3734 0.600477 -0.430459 0.560986
+-0.411874 0.574769 -0.466228 0.531631
+-0.448584 0.546601 -0.5 0.5
+-0.483373 0.516092 -0.531631 0.466228
+-0.516092 0.483373 -0.560986 0.430459
+-0.546601 0.448584 -0.587938 0.392847
+-0.574769 0.411874 -0.612372 0.353553
+-0.600477 0.3734 -0.634185 0.312745
+-0.623612 0.333328 -0.653281 0.270598
+-0.644078 0.291828 -0.669581 0.227292
+-0.661785 0.249078 -0.683013 0.183013
+-0.676659 0.205262 -0.69352 0.13795
+-0.688635 0.160567 -0.701057 0.0922959
+-0.697662 0.115185 -0.705593 0.046247
+-0.703702 0.0693086 -0.707107 -3.35437e-08
+-0.706728 0.0231359 -0.705593 -0.0462469
+0.735586 0.0240806 0.659318 0.153731
+0.732436 0.0721387 0.647852 0.196524
+0.72615 0.119888 0.633611 0.238474
+0.716754 0.167124 0.616658 0.279404
+0.704289 0.213644 0.597064 0.319137
+0.688808 0.259249 0.574913 0.357504
+0.670378 0.303744 0.5503 0.394339
+0.649076 0.346939 0.523331 0.429486
+0.624996 0.388647 0.49412 0.462794
+0.598239 0.428692 0.462794 0.49412
+0.56892 0.466901 0.429486 0.523331
+0.537165 0.50311 0.394339 0.5503
+0.50311 0.537165 0.357504 0.574913
+0.466901 0.56892 0.319137 0.597064
+0.428692 0.598239 0.279404 0.616658
+0.388647 0.624996 0.238474 0.633611
+0.346939 0.649077 0.196524 0.647852
+0.303744 0.670378 0.153731 0.659318
+0.259249 0.688808 0.110281 0.667961
+0.213644 0.704289 0.0663579 0.673743
+0.167124 0.716754 0.0221509 0.676641
+0.119888 0.72615 -0.022151 0.676641
+0.0721386 0.732436 -0.066358 0.673743
+0.0240805 0.735586 -0.110281 0.667961
+-0.0240807 0.735586 -0.153731 0.659318
+-0.0721387 0.732436 -0.196524 0.647852
+-0.119888 0.72615 -0.238474 0.633611
+-0.167124 0.716754 -0.279404 0.616658
+-0.213644 0.704289 -0.319137 0.597063
+-0.259249 0.688808 -0.357504 0.574913
+-0.303744 0.670378 -0.394339 0.5503
+-0.346939 0.649077 -0.429486 0.523331
+-0.388647 0.624996 -0.462794 0.49412
+-0.428692 0.598239 -0.49412 0.462794
+-0.466901 0.56892 -0.523331 0.429486
+-0.50311 0.537165 -0.5503 0.394339
+-0.537165 0.50311 -0.574913 0.357504
+-0.56892 0.466901 -0.597064 0.319137
+-0.598239 0.428692 -0.616658 0.279404
+-0.624996 0.388647 -0.633611 0.238474
+-0.649076 0.346939 -0.647852 0.196524
+-0.670378 0.303744 -0.659318 0.153731
+-0.688808 0.259249 -0.667961 0.110281
+-0.704289 0.213644 -0.673743 0.0663579
+-0.716754 0.167124 -0.676641 0.0221509
+-0.72615 0.119888 -0.676641 -0.0221509
+-0.732436 0.0721386 -0.673743 -0.0663579
+-0.735586 0.0240807 -0.667961 -0.110281
+0.735586 0.0240806 0.616658 0.279404
+0.732436 0.0721387 0.597064 0.319137
+0.72615 0.119888 0.574913 0.357504
+0.716754 0.167124 0.5503 0.394339
+0.704289 0.213644 0.523331 0.429486
+0.688808 0.259249 0.49412 0.462794
+0.670378 0.303744 0.462794 0.49412
+0.649076 0.346939 0.429486 0.523331
+0.624996 0.388647 0.394339 0.5503
+0.598239 0.428692 0.357504 0.574913
+0.56892 0.466901 0.319137 0.597064
+0.537165 0.50311 0.279404 0.616658
+0.50311 0.537165 0.238474 0.633611
+0.466901 0.56892 0.196524 0.647852
+0.428692 0.598239 0.153731 0.659318
+0.388647 0.624996 0.110281 0.667961
+0.346939 0.649077 0.0663579 0.673743
+0.303744 0.670378 0.0221509 0.676641
+0.259249 0.688808 -0.022151 0.676641
+0.213644 0.704289 -0.0663579 0.673743
+0.167124 0.716754 -0.110281 0.667961
+0.119888 0.72615 -0.153731 0.659318
+0.0721386 0.732436 -0.196524 0.647852
+0.0240805 0.735586 -0.238474 0.633611
+-0.0240807 0.735586 -0.279404 0.616658
+-0.0721387 0.732436 -0.319137 0.597063
+-0.119888 0.72615 -0.357504 0.574913
+-0.167124 0.716754 -0.394339 0.5503
+-0.213644 0.704289 -0.429486 0.52333
+-0.259249 0.688808 -0.462794 0.49412
+-0.303744 0.670378 -0.49412 0.462794
+-0.346939 0.649077 -0.523331 0.429486
+-0.388647 0.624996 -0.5503 0.394339
+-0.428692 0.598239 -0.574913 0.357504
+-0.466901 0.56892 -0.597064 0.319137
+-0.50311 0.537165 -0.616658 0.279404
+-0.537165 0.50311 -0.633611 0.238474
+-0.56892 0.466901 -0.647852 0.196524
+-0.598239 0.428692 -0.659318 0.153731
+-0.624996 0.388647 -0.667961 0.110281
+-0.649076 0.346939 -0.673743 0.066358
+-0.670378 0.303744 -0.676641 0.0221509
+-0.688808 0.259249 -0.676641 -0.022151
+-0.704289 0.213644 -0.673743 -0.0663579
+-0.716754 0.167124 -0.667961 -0.110281
+-0.72615 0.119888 -0.659318 -0.153731
+-0.732436 0.0721386 -0.647852 -0.196524
+-0.735586 0.0240807 -0.633611 -0.238474
+0.763354 0.0249896 0.559017 0.322749
+0.760085 0.0748618 0.536711 0.358619
+0.753561 0.124413 0.512107 0.392954
+0.743811 0.173432 0.485311 0.425606
+0.730875 0.221709 0.456435 0.456435
+0.71481 0.269035 0.425606 0.485311
+0.695684 0.31521 0.392954 0.512107
+0.673578 0.360035 0.358619 0.536711
+0.648589 0.403318 0.322749 0.559017
+0.620822 0.444875 0.285496 0.578929
+0.590396 0.484526 0.247021 0.596362
+0.557443 0.522102 0.207488 0.611241
+0.522102 0.557443 0.167067 0.623502
+0.484526 0.590396 0.12593 0.633094
+0.444875 0.620822 0.0842543 0.639975
+0.403318 0.648589 0.0422175 0.644115
+0.360035 0.673579 -5.30635e-08 0.645497
+0.31521 0.695684 -0.0422176 0.644115
+0.269035 0.71481 -0.0842543 0.639975
+0.221709 0.730875 -0.12593 0.633094
+0.173432 0.743811 -0.167067 0.623502
+0.124413 0.753561 -0.207488 0.611241
+0.0748617 0.760085 -0.247021 0.596362
+0.0249895 0.763354 -0.285496 0.578929
+-0.0249897 0.763354 -0.322749 0.559017
+-0.0748619 0.760085 -0.358619 0.536711
+-0.124414 0.753561 -0.392954 0.512107
+-0.173432 0.743811 -0.425606 0.48531
+-0.221709 0.730875 -0.456436 0.456435
+-0.269036 0.71481 -0.485311 0.425606
+-0.31521 0.695684 -0.512107 0.392954
+-0.360035 0.673579 -0.536711 0.358619
+-0.403318 0.648589 -0.559017 0.322749
+-0.444875 0.620822 -0.578929 0.285496
+-0.484526 0.590397 -0.596362 0.247021
+-0.522102 0.557443 -0.611241 0.207488
+-0.557443 0.522102 -0.623502 0.167067
+-0.590397 0.484526 -0.633094 0.12593
+-0.620822 0.444875 -0.639975 0.0842544
+-0.648589 0.403318 -0.644115 0.0422175
+-0.673578 0.360035 -0.645497 7.26194e-08
+-0.695684 0.31521 -0.644115 -0.0422175
+-0.71481 0.269035 -0.639975 -0.0842544
+-0.730875 0.221709 -0.633094 -0.12593
+-0.743811 0.173432 -0.623502 -0.167067
+-0.753561 0.124414 -0.611241 -0.207488
+-0.760085 0.0748618 -0.596362 -0.247021
+-0.763354 0.0249897 -0.578929 -0.285496
+0.763354 0.0249896 0.611241 0.207488
+0.760085 0.0748618 0.596362 0.247021
+0.753561 0.124413 0.578929 0.285496
+0.743811 0.173432 0.559017 0.322749
+0.730875 0.221709 0.536711 0.358619
+0.71481 0.269035 0.512107 0.392954
+0.695684 0.31521 0.485311 0.425606
+0.673578 0.360035 0.456435 0.456435
+0.648589 0.403318 0.425606 0.485311
+0.620822 0.444875 0.392954 0.512107
+0.590396 0.484526 0.358619 0.536711
+0.557443 0.522102 0.322749 0.559017
+0.522102 0.557443 0.285496 0.578929
+0.484526 0.590396 0.247021 0.596362
+0.444875 0.620822 0.207488 0.611241
+0.403318 0.648589 0.167067 0.623502
+0.360035 0.673579 0.12593 0.633094
+0.31521 0.695684 0.0842543 0.639975
+0.269035 0.71481 0.0422175 0.644115
+0.221709 0.730875 -2.77345e-08 0.645497
+0.173432 0.743811 -0.0422176 0.644115
+0.124413 0.753561 -0.0842544 0.639975
+0.0748617 0.760085 -0.12593 0.633094
+0.0249895 0.763354 -0.167067 0.623502
+-0.0249897 0.763354 -0.207488 0.611241
+-0.0748619 0.760085 -0.247021 0.596362
+-0.124414 0.753561 -0.285496 0.578929
+-0.173432 0.743811 -0.322749 0.559017
+-0.221709 0.730875 -0.358619 0.536711
+-0.269036 0.71481 -0.392954 0.512107
+-0.31521 0.695684 -0.425606 0.48531
+-0.360035 0.673579 -0.456435 0.456436
+-0.403318 0.648589 -0.485311 0.425606
+-0.444875 0.620822 -0.512107 0.392954
+-0.484526 0.590397 -0.536711 0.358619
+-0.522102 0.557443 -0.559017 0.322749
+-0.557443 0.522102 -0.578929 0.285496
+-0.590397 0.484526 -0.596362 0.247021
+-0.620822 0.444875 -0.611241 0.207488
+-0.648589 0.403318 -0.623502 0.167067
+-0.673578 0.360035 -0.633094 0.12593
+-0.695684 0.31521 -0.639975 0.0842543
+-0.71481 0.269035 -0.644115 0.0422175
+-0.730875 0.221709 -0.645497 2.09992e-08
+-0.743811 0.173432 -0.644115 -0.0422176
+-0.753561 0.124414 -0.639975 -0.0842543
+-0.760085 0.0748618 -0.633094 -0.12593
+-0.763354 0.0249897 -0.623502 -0.167067
+0.790146 0.0258667 0.557788 0.25273
+0.786763 0.0774894 0.540064 0.28867
+0.78001 0.12878 0.520028 0.323374
+0.769917 0.17952 0.497765 0.356693
+0.756528 0.22949 0.47337 0.388485
+0.739899 0.278478 0.446949 0.418613
+0.720101 0.326274 0.418613 0.446949
+0.69722 0.372672 0.388485 0.47337
+0.671353 0.417474 0.356693 0.497765
+0.642612 0.460489 0.323374 0.520028
+0.611118 0.501532 0.28867 0.540064
+0.577008 0.540427 0.25273 0.557788
+0.540427 0.577008 0.215708 0.573123
+0.501532 0.611118 0.177762 0.586004
+0.460489 0.642612 0.139055 0.596375
+0.417474 0.671353 0.0997526 0.604193
+0.372672 0.69722 0.0600229 0.609424
+0.326274 0.720101 0.0200362 0.612045
+0.278478 0.739899 -0.0200363 0.612045
+0.22949 0.756528 -0.060023 0.609424
+0.17952 0.769917 -0.0997527 0.604193
+0.12878 0.78001 -0.139055 0.596375
+0.0774893 0.786763 -0.177762 0.586004
+0.0258666 0.790146 -0.215708 0.573123
+-0.0258668 0.790146 -0.25273 0.557788
+-0.0774894 0.786763 -0.28867 0.540064
+-0.12878 0.78001 -0.323374 0.520028
+-0.17952 0.769917 -0.356693 0.497765
+-0.22949 0.756528 -0.388485 0.47337
+-0.278478 0.739899 -0.418613 0.446949
+-0.326274 0.720101 -0.446949 0.418613
+-0.372672 0.69722 -0.47337 0.388485
+-0.417474 0.671353 -0.497765 0.356693
+-0.460489 0.642612 -0.520028 0.323374
+-0.501532 0.611118 -0.540064 0.28867
+-0.540427 0.577008 -0.557788 0.25273
+-0.577008 0.540427 -0.573123 0.215708
+-0.611118 0.501532 -0.586004 0.177762
+-0.642612 0.460489 -0.596375 0.139055
+-0.671353 0.417474 -0.604193 0.0997527
+-0.69722 0.372672 -0.609424 0.0600231
+-0.720101 0.326274 -0.612045 0.0200363
+-0.739899 0.278478 -0.612045 -0.0200363
+-0.756528 0.22949 -0.609424 -0.060023
+-0.769917 0.179519 -0.604193 -0.0997527
+-0.78001 0.12878 -0.596375 -0.139055
+-0.786763 0.0774893 -0.586004 -0.177762
+-0.790146 0.0258668 -0.573123 -0.215708
+0.612045 0.0200363 0.739899 -0.278478
+0.609424 0.060023 0.756528 -0.22949
+0.604193 0.0997527 0.769917 -0.17952
+0.596375 0.139055 0.78001 -0.12878
+0.586004 0.177762 0.786763 -0.0774894
+0.573123 0.215708 0.790146 -0.0258667
+0.557788 0.25273 0.790146 0.0258667
+0.540064 0.28867 0.786763 0.0774894
+0.520028 0.323374 0.78001 0.12878
+0.497765 0.356693 0.769917 0.17952
+0.47337 0.388485 0.756528 0.22949
+0.446949 0.418613 0.739899 0.278478
+0.418613 0.446949 0.720101 0.326274
+0.388485 0.47337 0.69722 0.372672
+0.356693 0.497765 0.671353 0.417474
+0.323374 0.520028 0.642612 0.460489
+0.28867 0.540064 0.611118 0.501532
+0.25273 0.557788 0.577008 0.540427
+0.215708 0.573123 0.540427 0.577008
+0.177762 0.586004 0.501532 0.611118
+0.139055 0.596375 0.460489 0.642612
+0.0997526 0.604193 0.417474 0.671353
+0.0600229 0.609424 0.372672 0.69722
+0.0200362 0.612045 0.326273 0.720101
+-0.0200363 0.612045 0.278478 0.739899
+-0.060023 0.609424 0.22949 0.756528
+-0.0997527 0.604193 0.179519 0.769917
+-0.139055 0.596375 0.12878 0.78001
+-0.177762 0.586004 0.0774892 0.786763
+-0.215708 0.573123 0.0258666 0.790146
+-0.25273 0.557788 -0.0258668 0.790146
+-0.28867 0.540064 -0.0774893 0.786763
+-0.323374 0.520028 -0.12878 0.78001
+-0.356693 0.497765 -0.17952 0.769917
+-0.388485 0.47337 -0.22949 0.756528
+-0.418613 0.446949 -0.278478 0.739899
+-0.446949 0.418613 -0.326274 0.720101
+-0.47337 0.388485 -0.372672 0.69722
+-0.497765 0.356693 -0.417474 0.671353
+-0.520028 0.323374 -0.460489 0.642612
+-0.540064 0.28867 -0.501532 0.611118
+-0.557788 0.25273 -0.540427 0.577008
+-0.573123 0.215708 -0.577008 0.540427
+-0.586004 0.177762 -0.611118 0.501532
+-0.596375 0.139055 -0.642612 0.460489
+-0.604193 0.0997527 -0.671353 0.417474
+-0.609424 0.060023 -0.69722 0.372672
+-0.612045 0.0200363 -0.720101 0.326274
+0.645152 0.0211201 0.737738 -0.197676
+0.642389 0.0632698 0.749087 -0.149003
+0.636876 0.105149 0.757229 -0.099691
+0.628635 0.146577 0.762127 -0.0499525
+0.617702 0.187378 0.763763 5.69236e-10
+0.604125 0.227376 0.762127 0.0499525
+0.58796 0.266401 0.757229 0.099691
+0.569278 0.304285 0.749087 0.149003
+0.548158 0.340866 0.737738 0.197676
+0.52469 0.375988 0.72323 0.245503
+0.498976 0.409499 0.705625 0.292279
+0.471125 0.441257 0.684998 0.337804
+0.441257 0.471125 0.661438 0.381881
+0.409499 0.498976 0.635045 0.424324
+0.375988 0.52469 0.605934 0.464949
+0.340866 0.548158 0.574227 0.503584
+0.304285 0.569278 0.540062 0.540062
+0.266401 0.58796 0.503584 0.574227
+0.227376 0.604125 0.464949 0.605934
+0.187378 0.617702 0.424324 0.635045
+0.146577 0.628635 0.381881 0.661438
+0.105148 0.636876 0.337803 0.684998
+0.0632697 0.642389 0.292279 0.705625
+0.02112 0.645152 0.245503 0.72323
+-0.0211201 0.645152 0.197676 0.737738
+-0.0632698 0.642389 0.149003 0.749087
+-0.105149 0.636876 0.099691 0.757229
+-0.146577 0.628635 0.0499524 0.762127
+-0.187378 0.617702 -1.25002e-07 0.763763
+-0.227377 0.604125 -0.0499526 0.762127
+-0.266401 0.58796 -0.0996911 0.757229
+-0.304285 0.569278 -0.149003 0.749087
+-0.340866 0.548158 -0.197676 0.737738
+-0.375988 0.52469 -0.245504 0.72323
+-0.409499 0.498976 -0.292279 0.705625
+-0.441257 0.471125 -0.337804 0.684998
+-0.471125 0.441257 -0.381881 0.661438
+-0.498976 0.409499 -0.424324 0.635045
+-0.52469 0.375988 -0.464949 0.605934
+-0.548158 0.340866 -0.503584 0.574227
+-0.569278 0.304285 -0.540062 0.540062
+-0.58796 0.266401 -0.574227 0.503584
+-0.604125 0.227376 -0.605934 0.464949
+-0.617702 0.187378 -0.635045 0.424324
+-0.628635 0.146577 -0.661438 0.381881
+-0.636876 0.105149 -0.684998 0.337804
+-0.642389 0.0632698 -0.705625 0.292279
+-0.645152 0.0211201 -0.72323 0.245504
+0.645152 0.0211201 0.684998 -0.337804
+0.642389 0.0632698 0.705625 -0.292279
+0.636876 0.105149 0.72323 -0.245503
+0.628635 0.146577 0.737738 -0.197676
+0.617702 0.187378 0.749087 -0.149003
+0.604125 0.227376 0.757229 -0.099691
+0.58796 0.266401 0.762127 -0.0499525
+0.569278 0.304285 0.763763 1.61233e-08
+0.548158 0.340866 0.762127 0.0499525
+0.52469 0.375988 0.757229 0.099691
+0.498976 0.409499 0.749087 0.149003
+0.471125 0.441257 0.737738 0.197676
+0.441257 0.471125 0.72323 0.245503
+0.409499 0.498976 0.705625 0.292279
+0.375988 0.52469 0.684998 0.337804
+0.340866 0.548158 0.661438 0.381881
+0.304285 0.569278 0.635045 0.424324
+0.266401 0.58796 0.605934 0.464949
+0.227376 0.604125 0.574227 0.503584
+0.187378 0.617702 0.540062 0.540062
+0.146577 0.628635 0.503584 0.574227
+0.105148 0.636876 0.464949 0.605934
+0.0632697 0.642389 0.424324 0.635045
+0.02112 0.645152 0.381881 0.661438
+-0.0211201 0.645152 0.337804 0.684998
+-0.0632698 0.642389 0.292279 0.705625
+-0.105149 0.636876 0.245503 0.72323
+-0.146577 0.628635 0.197676 0.737738
+-0.187378 0.617702 0.149003 0.749087
+-0.227377 0.604125 0.0996909 0.757229
+-0.266401 0.58796 0.0499524 0.762127
+-0.304285 0.569278 8.70629e-08 0.763763
+-0.340866 0.548158 -0.0499525 0.762127
+-0.375988 0.52469 -0.0996911 0.757229
+-0.409499 0.498976 -0.149003 0.749087
+-0.441257 0.471125 -0.197676 0.737738
+-0.471125 0.441257 -0.245503 0.72323
+-0.498976 0.409499 -0.292279 0.705625
+-0.52469 0.375988 -0.337804 0.684998
+-0.548158 0.340866 -0.381881 0.661438
+-0.569278 0.304285 -0.424324 0.635045
+-0.58796 0.266401 -0.464949 0.605934
+-0.604125 0.227376 -0.503584 0.574227
+-0.617702 0.187378 -0.540062 0.540062
+-0.628635 0.146577 -0.574227 0.503584
+-0.636876 0.105149 -0.605934 0.464949
+-0.642389 0.0632698 -0.635045 0.424324
+-0.645152 0.0211201 -0.661438 0.381881
+0.676641 0.0221509 0.688808 -0.259249
+0.673743 0.0663579 0.704289 -0.213644
+0.667961 0.110281 0.716754 -0.167124
+0.659318 0.153731 0.72615 -0.119888
+0.647852 0.196524 0.732436 -0.0721387
+0.633611 0.238474 0.735586 -0.0240806
+0.616658 0.279404 0.735586 0.0240806
+0.597064 0.319137 0.732436 0.0721387
+0.574913 0.357504 0.72615 0.119888
+0.5503 0.394339 0.716754 0.167124
+0.523331 0.429486 0.704289 0.213644
+0.49412 0.462794 0.688808 0.259249
+0.462794 0.49412 0.670378 0.303744
+0.429486 0.523331 0.649076 0.346939
+0.394339 0.5503 0.624996 0.388647
+0.357504 0.574913 0.598239 0.428692
+0.319137 0.597064 0.56892 0.466901
+0.279404 0.616658 0.537165 0.50311
+0.238474 0.633611 0.50311 0.537165
+0.196524 0.647852 0.466901 0.56892
+0.153731 0.659318 0.428692 0.598239
+0.110281 0.667961 0.388647 0.624996
+0.0663578 0.673743 0.346939 0.649077
+0.0221508 0.676641 0.303744 0.670378
+-0.022151 0.676641 0.259249 0.688808
+-0.066358 0.673743 0.213644 0.704289
+-0.110281 0.667961 0.167124 0.716754
+-0.153731 0.659318 0.119888 0.72615
+-0.196524 0.647852 0.0721385 0.732436
+-0.238475 0.633611 0.0240805 0.735586
+-0.279404 0.616658 -0.0240807 0.735586
+-0.319137 0.597064 -0.0721386 0.732436
+-0.357504 0.574913 -0.119888 0.72615
+-0.394339 0.5503 -0.167124 0.716754
+-0.429486 0.523331 -0.213644 0.704289
+-0.462794 0.49412 -0.259249 0.688808
+-0.49412 0.462794 -0.303744 0.670378
+-0.523331 0.429486 -0.346939 0.649076
+-0.5503 0.394339 -0.388647 0.624996
+-0.574913 0.357504 -0.428692 0.598239
+-0.597063 0.319137 -0.466901 0.56892
+-0.616658 0.279404 -0.50311 0.537165
+-0.633611 0.238474 -0.537165 0.50311
+-0.647852 0.196524 -0.56892 0.466901
+-0.659318 0.153731 -0.598239 0.428692
+-0.667961 0.110281 -0.624996 0.388647
+-0.673743 0.0663579 -0.649076 0.346939
+-0.676641 0.022151 -0.670378 0.303744
+0.676641 0.0221509 0.72615 -0.119888
+0.673743 0.0663579 0.732436 -0.0721387
+0.667961 0.110281 0.735586 -0.0240806
+0.659318 0.153731 0.735586 0.0240806
+0.647852 0.196524 0.732436 0.0721387
+0.633611 0.238474 0.72615 0.119888
+0.616658 0.279404 0.716754 0.167124
+0.597064 0.319137 0.704289 0.213644
+0.574913 0.357504 0.688808 0.259249
+0.5503 0.394339 0.670378 0.303744
+0.523331 0.429486 0.649076 0.346939
+0.49412 0.462794 0.624996 0.388647
+0.462794 0.49412 0.598239 0.428692
+0.429486 0.523331 0.56892 0.466901
+0.394339 0.5503 0.537165 0.50311
+0.357504 0.574913 0.50311 0.537165
+0.319137 0.597064 0.466901 0.56892
+0.279404 0.616658 0.428692 0.598239
+0.238474 0.633611 0.388647 0.624996
+0.196524 0.647852 0.346939 0.649076
+0.153731 0.659318 0.303744 0.670378
+0.110281 0.667961 0.259249 0.688808
+0.0663578 0.673743 0.213644 0.704289
+0.0221508 0.676641 0.167124 0.716754
+-0.022151 0.676641 0.119888 0.72615
+-0.066358 0.673743 0.0721386 0.732436
+-0.110281 0.667961 0.0240805 0.735586
+-0.153731 0.659318 -0.0240806 0.735586
+-0.196524 0.647852 -0.0721388 0.732436
+-0.238475 0.633611 -0.119888 0.72615
+-0.279404 0.616658 -0.167124 0.716754
+-0.319137 0.597064 -0.213644 0.704289
+-0.357504 0.574913 -0.259249 0.688808
+-0.394339 0.5503 -0.303744 0.670378
+-0.429486 0.523331 -0.346939 0.649076
+-0.462794 0.49412 -0.388647 0.624996
+-0.49412 0.462794 -0.428692 0.598239
+-0.523331 0.429486 -0.466901 0.56892
+-0.5503 0.394339 -0.50311 0.537165
+-0.574913 0.357504 -0.537165 0.50311
+-0.597063 0.319137 -0.56892 0.466901
+-0.616658 0.279404 -0.598239 0.428692
+-0.633611 0.238474 -0.624996 0.388647
+-0.647852 0.196524 -0.649076 0.346939
+-0.659318 0.153731 -0.670378 0.303744
+-0.667961 0.110281 -0.688808 0.259249
+-0.673743 0.0663579 -0.704289 0.213644
+-0.676641 0.022151 -0.716754 0.167124
+0.706728 0.0231359 0.705593 -0.046247
+0.703702 0.0693086 0.707107 1.93179e-09
+0.697662 0.115184 0.705593 0.046247
+0.688635 0.160567 0.701057 0.092296
+0.676659 0.205262 0.69352 0.13795
+0.661785 0.249078 0.683013 0.183013
+0.644078 0.291828 0.669581 0.227292
+0.623612 0.333328 0.653281 0.270598
+0.600477 0.3734 0.634185 0.312745
+0.574769 0.411874 0.612372 0.353553
+0.546601 0.448584 0.587938 0.392847
+0.516092 0.483373 0.560986 0.430459
+0.483373 0.516092 0.531631 0.466228
+0.448584 0.546601 0.5 0.5
+0.411874 0.574769 0.466228 0.531631
+0.3734 0.600477 0.430459 0.560986
+0.333328 0.623613 0.392847 0.587938
+0.291828 0.644078 0.353553 0.612372
+0.249078 0.661785 0.312745 0.634185
+0.205262 0.676659 0.270598 0.653281
+0.160567 0.688635 0.227292 0.669581
+0.115184 0.697662 0.183013 0.683013
+0.0693085 0.703702 0.13795 0.69352
+0.0231358 0.706728 0.0922959 0.701057
+-0.023136 0.706728 0.0462469 0.705593
+-0.0693086 0.703702 -5.91822e-08 0.707107
+-0.115185 0.697662 -0.046247 0.705593
+-0.160567 0.688635 -0.092296 0.701057
+-0.205262 0.676659 -0.13795 0.69352
+-0.249078 0.661785 -0.183013 0.683013
+-0.291828 0.644078 -0.227292 0.669581
+-0.333328 0.623613 -0.270598 0.653282
+-0.3734 0.600477 -0.312745 0.634185
+-0.411874 0.574769 -0.353553 0.612372
+-0.448584 0.546601 -0.392847 0.587938
+-0.483373 0.516092 -0.430459 0.560985
+-0.516092 0.483373 -0.466228 0.531631
+-0.546601 0.448584 -0.5 0.5
+-0.574769 0.411874 -0.531631 0.466228
+-0.600477 0.3734 -0.560986 0.430459
+-0.623612 0.333328 -0.587938 0.392848
+-0.644078 0.291828 -0.612372 0.353553
+-0.661785 0.249078 -0.634185 0.312745
+-0.676659 0.205262 -0.653281 0.270598
+-0.688635 0.160567 -0.669581 0.227292
+-0.697662 0.115185 -0.683013 0.183013
+-0.703702 0.0693086 -0.69352 0.13795
+-0.706728 0.0231359 -0.701057 0.092296
+0.706728 0.0231359 0.683013 -0.183013
+0.703702 0.0693086 0.69352 -0.13795
+0.697662 0.115184 0.701057 -0.092296
+0.688635 0.160567 0.705593 -0.046247
+0.676659 0.205262 0.707107 5.2701e-10
+0.661785 0.249078 0.705593 0.046247
+0.644078 0.291828 0.701057 0.092296
+0.623612 0.333328 0.69352 0.13795
+0.600477 0.3734 0.683013 0.183013
+0.574769 0.411874 0.669581 0.227292
+0.546601 0.448584 0.653281 0.270598
+0.516092 0.483373 0.634185 0.312745
+0.483373 0.516092 0.612372 0.353553
+0.448584 0.546601 0.587938 0.392847
+0.411874 0.574769 0.560986 0.430459
+0.3734 0.600477 0.531631 0.466228
+0.333328 0.623613 0.5 0.5
+0.291828 0.644078 0.466228 0.531631
+0.249078 0.661785 0.430459 0.560986
+0.205262 0.676659 0.392847 0.587938
+0.160567 0.688635 0.353553 0.612372
+0.115184 0.697662 0.312745 0.634185
+0.0693085 0.703702 0.270598 0.653282
+0.0231358 0.706728 0.227292 0.669581
+-0.023136 0.706728 0.183013 0.683013
+-0.0693086 0.703702 0.13795 0.69352
+-0.115185 0.697662 0.0922959 0.701057
+-0.160567 0.688635 0.046247 0.705593
+-0.205262 0.676659 -1.15729e-07 0.707107
+-0.249078 0.661785 -0.0462471 0.705593
+-0.291828 0.644078 -0.0922961 0.701057
+-0.333328 0.623613 -0.13795 0.69352
+-0.3734 0.600477 -0.183013 0.683013
+-0.411874 0.574769 -0.227292 0.669581
+-0.448584 0.546601 -0.270598 0.653281
+-0.483373 0.516092 -0.312745 0.634185
+-0.516092 0.483373 -0.353553 0.612372
+-0.546601 0.448584 -0.392848 0.587938
+-0.574769 0.411874 -0.430459 0.560986
+-0.600477 0.3734 -0.466228 0.531631
+-0.623612 0.333328 -0.5 0.5
+-0.644078 0.291828 -0.531631 0.466228
+-0.661785 0.249078 -0.560986 0.430459
+-0.676659 0.205262 -0.587938 0.392847
+-0.688635 0.160567 -0.612372 0.353553
+-0.697662 0.115185 -0.634185 0.312745
+-0.703702 0.0693086 -0.653281 0.270598
+-0.706728 0.0231359 -0.669581 0.227292
+0.735586 0.0240806 0.667961 -0.110281
+0.732436 0.0721387 0.673743 -0.0663579
+0.72615 0.119888 0.676641 -0.0221509
+0.716754 0.167124 0.676641 0.0221509
+0.704289 0.213644 0.673743 0.0663579
+0.688808 0.259249 0.667961 0.110281
+0.670378 0.303744 0.659318 0.153731
+0.649076 0.346939 0.647852 0.196524
+0.624996 0.388647 0.633611 0.238474
+0.598239 0.428692 0.616658 0.279404
+0.56892 0.466901 0.597064 0.319137
+0.537165 0.50311 0.574913 0.357504
+0.50311 0.537165 0.5503 0.394339
+0.466901 0.56892 0.523331 0.429486
+0.428692 0.598239 0.49412 0.462794
+0.388647 0.624996 0.462794 0.49412
+0.346939 0.649077 0.429486 0.523331
+0.303744 0.670378 0.394339 0.5503
+0.259249 0.688808 0.357504 0.574913
+0.213644 0.704289 0.319137 0.597064
+0.167124 0.716754 0.279404 0.616658
+0.119888 0.72615 0.238474 0.633611
+0.0721386 0.732436 0.196524 0.647852
+0.0240805 0.735586 0.153731 0.659318
+-0.0240807 0.735586 0.110281 0.667961
+-0.0721387 0.732436 0.0663579 0.673743
+-0.119888 0.72615 0.0221509 0.676641
+-0.167124 0.716754 -0.022151 0.676641
+-0.213644 0.704289 -0.066358 0.673743
+-0.259249 0.688808 -0.110281 0.667961
+-0.303744 0.670378 -0.153731 0.659318
+-0.346939 0.649077 -0.196524 0.647852
+-0.388647 0.624996 -0.238474 0.633611
+-0.428692 0.598239 -0.279404 0.616658
+-0.466901 0.56892 -0.319137 0.597064
+-0.50311 0.537165 -0.357504 0.574913
+-0.537165 0.50311 -0.394339 0.5503
+-0.56892 0.466901 -0.429486 0.523331
+-0.598239 0.428692 -0.462794 0.49412
+-0.624996 0.388647 -0.49412 0.462794
+-0.649076 0.346939 -0.523331 0.429486
+-0.670378 0.303744 -0.5503 0.394339
+-0.688808 0.259249 -0.574913 0.357504
+-0.704289 0.213644 -0.597064 0.319137
+-0.716754 0.167124 -0.616658 0.279404
+-0.72615 0.119888 -0.633611 0.238474
+-0.732436 0.0721386 -0.647852 0.196524
+-0.735586 0.0240807 -0.659318 0.153731
+0.676641 0.0221509 0.624996 -0.388647
+0.673743 0.0663579 0.649076 -0.346939
+0.667961 0.110281 0.670378 -0.303744
+0.659318 0.153731 0.688808 -0.259249
+0.647852 0.196524 0.704289 -0.213644
+0.633611 0.238474 0.716754 -0.167124
+0.616658 0.279404 0.72615 -0.119888
+0.597064 0.319137 0.732436 -0.0721386
+0.574913 0.357504 0.735586 -0.0240806
+0.5503 0.394339 0.735586 0.0240806
+0.523331 0.429486 0.732436 0.0721386
+0.49412 0.462794 0.72615 0.119888
+0.462794 0.49412 0.716754 0.167124
+0.429486 0.523331 0.704289 0.213644
+0.394339 0.5503 0.688808 0.259249
+0.357504 0.574913 0.670378 0.303744
+0.319137 0.597064 0.649076 0.346939
+0.279404 0.616658 0.624996 0.388647
+0.238474 0.633611 0.598239 0.428692
+0.196524 0.647852 0.56892 0.466901
+0.153731 0.659318 0.537165 0.50311
+0.110281 0.667961 0.50311 0.537165
+0.0663578 0.673743 0.466901 0.56892
+0.0221508 0.676641 0.428692 0.598239
+-0.022151 0.676641 0.388647 0.624996
+-0.066358 0.673743 0.346939 0.649077
+-0.110281 0.667961 0.303744 0.670378
+-0.153731 0.659318 0.259249 0.688808
+-0.196524 0.647852 0.213644 0.704289
+-0.238475 0.633611 0.167123 0.716754
+-0.279404 0.616658 0.119888 0.72615
+-0.319137 0.597064 0.0721387 0.732436
+-0.357504 0.574913 0.0240806 0.735586
+-0.394339 0.5503 -0.0240807 0.735586
+-0.429486 0.523331 -0.0721386 0.732436
+-0.462794 0.49412 -0.119888 0.72615
+-0.49412 0.462794 -0.167124 0.716754
+-0.523331 0.429486 -0.213644 0.704289
+-0.5503 0.394339 -0.259249 0.688808
+-0.574913 0.357504 -0.303744 0.670378
+-0.597063 0.319137 -0.346939 0.649077
+-0.616658 0.279404 -0.388647 0.624996
+-0.633611 0.238474 -0.428692 0.598239
+-0.647852 0.196524 -0.466901 0.56892
+-0.659318 0.153731 -0.50311 0.537165
+-0.667961 0.110281 -0.537165 0.50311
+-0.673743 0.0663579 -0.56892 0.466901
+-0.676641 0.022151 -0.598239 0.428692
+0.706728 0.0231359 0.634185 -0.312745
+0.703702 0.0693086 0.653281 -0.270598
+0.697662 0.115184 0.669581 -0.227292
+0.688635 0.160567 0.683013 -0.183013
+0.676659 0.205262 0.69352 -0.13795
+0.661785 0.249078 0.701057 -0.092296
+0.644078 0.291828 0.705593 -0.046247
+0.623612 0.333328 0.707107 1.49273e-08
+0.600477 0.3734 0.705593 0.046247
+0.574769 0.411874 0.701057 0.092296
+0.546601 0.448584 0.69352 0.13795
+0.516092 0.483373 0.683013 0.183013
+0.483373 0.516092 0.669581 0.227292
+0.448584 0.546601 0.653281 0.270598
+0.411874 0.574769 0.634185 0.312745
+0.3734 0.600477 0.612372 0.353553
+0.333328 0.623613 0.587938 0.392848
+0.291828 0.644078 0.560985 0.430459
+0.249078 0.661785 0.531631 0.466228
+0.205262 0.676659 0.5 0.5
+0.160567 0.688635 0.466228 0.531631
+0.115184 0.697662 0.430459 0.560986
+0.0693085 0.703702 0.392847 0.587938
+0.0231358 0.706728 0.353553 0.612372
+-0.023136 0.706728 0.312745 0.634185
+-0.0693086 0.703702 0.270598 0.653282
+-0.115185 0.697662 0.227292 0.669581
+-0.160567 0.688635 0.183013 0.683013
+-0.205262 0.676659 0.13795 0.69352
+-0.249078 0.661785 0.0922959 0.701057
+-0.291828 0.644078 0.0462469 0.705593
+-0.333328 0.623613 8.06046e-08 0.707107
+-0.3734 0.600477 -0.046247 0.705593
+-0.411874 0.574769 -0.092296 0.701057
+-0.448584 0.546601 -0.13795 0.69352
+-0.483373 0.516092 -0.183013 0.683013
+-0.516092 0.483373 -0.227292 0.669581
+-0.546601 0.448584 -0.270598 0.653281
+-0.574769 0.411874 -0.312745 0.634185
+-0.600477 0.3734 -0.353553 0.612372
+-0.623612 0.333328 -0.392847 0.587938
+-0.644078 0.291828 -0.430459 0.560986
+-0.661785 0.249078 -0.466228 0.531631
+-0.676659 0.205262 -0.5 0.5
+-0.688635 0.160567 -0.531631 0.466228
+-0.697662 0.115185 -0.560986 0.430459
+-0.703702 0.0693086 -0.587938 0.392847
+-0.706728 0.0231359 -0.612372 0.353553
+0.706728 0.0231359 0.560986 -0.430459
+0.703702 0.0693086 0.587938 -0.392847
+0.697662 0.115184 0.612372 -0.353553
+0.688635 0.160567 0.634185 -0.312745
+0.676659 0.205262 0.653281 -0.270598
+0.661785 0.249078 0.669581 -0.227292
+0.644078 0.291828 0.683013 -0.183013
+0.623612 0.333328 0.69352 -0.13795
+0.600477 0.3734 0.701057 -0.092296
+0.574769 0.411874 0.705593 -0.046247
+0.546601 0.448584 0.707107 -1.28193e-08
+0.516092 0.483373 0.705593 0.046247
+0.483373 0.516092 0.701057 0.092296
+0.448584 0.546601 0.69352 0.13795
+0.411874 0.574769 0.683013 0.183013
+0.3734 0.600477 0.669581 0.227292
+0.333328 0.623613 0.653281 0.270598
+0.291828 0.644078 0.634185 0.312745
+0.249078 0.661785 0.612372 0.353553
+0.205262 0.676659 0.587938 0.392848
+0.160567 0.688635 0.560986 0.430459
+0.115184 0.697662 0.531631 0.466228
+0.0693085 0.703702 0.5 0.5
+0.0231358 0.706728 0.466228 0.531631
+-0.023136 0.706728 0.430459 0.560986
+-0.0693086 0.703702 0.392847 0.587938
+-0.115185 0.697662 0.353553 0.612372
+-0.160567 0.688635 0.312745 0.634185
+-0.205262 0.676659 0.270598 0.653282
+-0.249078 0.661785 0.227292 0.669581
+-0.291828 0.644078 0.183013 0.683013
+-0.333328 0.623613 0.13795 0.69352
+-0.3734 0.600477 0.092296 0.701057
+-0.411874 0.574769 0.0462469 0.705593
+-0.448584 0.546601 2.40575e-08 0.707107
+-0.483373 0.516092 -0.046247 0.705593
+-0.516092 0.483373 -0.0922959 0.701057
+-0.546601 0.448584 -0.13795 0.69352
+-0.574769 0.411874 -0.183013 0.683013
+-0.600477 0.3734 -0.227292 0.669581
+-0.623612 0.333328 -0.270598 0.653282
+-0.644078 0.291828 -0.312745 0.634185
+-0.661785 0.249078 -0.353553 0.612372
+-0.676659 0.205262 -0.392847 0.587938
+-0.688635 0.160567 -0.430459 0.560985
+-0.697662 0.115185 -0.466228 0.531631
+-0.703702 0.0693086 -0.5 0.5
+-0.706728 0.0231359 -0.531631 0.466228
+0.735586 0.0240806 0.574913 -0.357504
+0.732436 0.0721387 0.597064 -0.319137
+0.72615 0.119888 0.616658 -0.279404
+0.716754 0.167124 0.633611 -0.238474
+0.704289 0.213644 0.647852 -0.196524
+0.688808 0.259249 0.659318 -0.153731
+0.670378 0.303744 0.667961 -0.110281
+0.649076 0.346939 0.673743 -0.0663579
+0.624996 0.388647 0.676641 -0.0221509
+0.598239 0.428692 0.676641 0.0221509
+0.56892 0.466901 0.673743 0.0663579
+0.537165 0.50311 0.667961 0.110281
+0.50311 0.537165 0.659318 0.153731
+0.466901 0.56892 0.647852 0.196524
+0.428692 0.598239 0.633611 0.238474
+0.388647 0.624996 0.616658 0.279404
+0.346939 0.649077 0.597063 0.319137
+0.303744 0.670378 0.574913 0.357504
+0.259249 0.688808 0.5503 0.394339
+0.213644 0.704289 0.523331 0.429486
+0.167124 0.716754 0.49412 0.462794
+0.119888 0.72615 0.462794 0.49412
+0.0721386 0.732436 0.429486 0.523331
+0.0240805 0.735586 0.394339 0.5503
+-0.0240807 0.735586 0.357504 0.574913
+-0.0721387 0.732436 0.319137 0.597064
+-0.119888 0.72615 0.279404 0.616658
+-0.167124 0.716754 0.238474 0.633611
+-0.213644 0.704289 0.196524 0.647852
+-0.259249 0.688808 0.153731 0.659318
+-0.303744 0.670378 0.110281 0.667961
+-0.346939 0.649077 0.066358 0.673743
+-0.388647 0.624996 0.0221509 0.676641
+-0.428692 0.598239 -0.022151 0.676641
+-0.466901 0.56892 -0.0663579 0.673743
+-0.50311 0.537165 -0.110281 0.667961
+-0.537165 0.50311 -0.153731 0.659318
+-0.56892 0.466901 -0.196524 0.647852
+-0.598239 0.428692 -0.238474 0.633611
+-0.624996 0.388647 -0.279404 0.616658
+-0.649076 0.346939 -0.319137 0.597064
+-0.670378 0.303744 -0.357504 0.574913
+-0.688808 0.259249 -0.394339 0.5503
+-0.704289 0.213644 -0.429486 0.523331
+-0.716754 0.167124 -0.462794 0.49412
+-0.72615 0.119888 -0.49412 0.462794
+-0.732436 0.0721386 -0.523331 0.429486
+-0.735586 0.0240807 -0.5503 0.394339
+0.735586 0.0240806 0.633611 -0.238474
+0.732436 0.0721387 0.647852 -0.196524
+0.72615 0.119888 0.659318 -0.153731
+0.716754 0.167124 0.667961 -0.110281
+0.704289 0.213644 0.673743 -0.0663579
+0.688808 0.259249 0.676641 -0.0221509
+0.670378 0.303744 0.676641 0.0221509
+0.649076 0.346939 0.673743 0.0663579
+0.624996 0.388647 0.667961 0.110281
+0.598239 0.428692 0.659318 0.153731
+0.56892 0.466901 0.647852 0.196524
+0.537165 0.50311 0.633611 0.238474
+0.50311 0.537165 0.616658 0.279404
+0.466901 0.56892 0.597064 0.319137
+0.428692 0.598239 0.574913 0.357504
+0.388647 0.624996 0.5503 0.394339
+0.346939 0.649077 0.523331 0.429486
+0.303744 0.670378 0.49412 0.462794
+0.259249 0.688808 0.462794 0.49412
+0.213644 0.704289 0.429486 0.523331
+0.167124 0.716754 0.394339 0.5503
+0.119888 0.72615 0.357504 0.574913
+0.0721386 0.732436 0.319137 0.597064
+0.0240805 0.735586 0.279404 0.616658
+-0.0240807 0.735586 0.238474 0.633611
+-0.0721387 0.732436 0.196524 0.647852
+-0.119888 0.72615 0.153731 0.659318
+-0.167124 0.716754 0.110281 0.667961
+-0.213644 0.704289 0.0663578 0.673743
+-0.259249 0.688808 0.0221508 0.676641
+-0.303744 0.670378 -0.022151 0.676641
+-0.346939 0.649077 -0.0663578 0.673743
+-0.388647 0.624996 -0.110281 0.667961
+-0.428692 0.598239 -0.153731 0.659318
+-0.466901 0.56892 -0.196524 0.647852
+-0.50311 0.537165 -0.238474 0.633611
+-0.537165 0.50311 -0.279404 0.616658
+-0.56892 0.466901 -0.319137 0.597064
+-0.598239 0.428692 -0.357504 0.574913
+-0.624996 0.388647 -0.394339 0.5503
+-0.649076 0.346939 -0.429486 0.523331
+-0.670378 0.303744 -0.462794 0.49412
+-0.688808 0.259249 -0.49412 0.462794
+-0.704289 0.213644 -0.523331 0.429486
+-0.716754 0.167124 -0.5503 0.394339
+-0.72615 0.119888 -0.574913 0.357504
+-0.732436 0.0721386 -0.597064 0.319137
+-0.735586 0.0240807 -0.616658 0.279404
+0.763354 0.0249896 0.623502 -0.167067
+0.760085 0.0748618 0.633094 -0.12593
+0.753561 0.124413 0.639975 -0.0842543
+0.743811 0.173432 0.644115 -0.0422175
+0.730875 0.221709 0.645497 4.81092e-10
+0.71481 0.269035 0.644115 0.0422175
+0.695684 0.31521 0.639975 0.0842543
+0.673578 0.360035 0.633094 0.12593
+0.648589 0.403318 0.623502 0.167067
+0.620822 0.444875 0.611241 0.207488
+0.590396 0.484526 0.596362 0.247021
+0.557443 0.522102 0.578929 0.285496
+0.522102 0.557443 0.559017 0.322749
+0.484526 0.590396 0.536711 0.358619
+0.444875 0.620822 0.512107 0.392954
+0.403318 0.648589 0.48531 0.425606
+0.360035 0.673579 0.456435 0.456436
+0.31521 0.695684 0.425606 0.485311
+0.269035 0.71481 0.392954 0.512107
+0.221709 0.730875 0.358619 0.536711
+0.173432 0.743811 0.322749 0.559017
+0.124413 0.753561 0.285496 0.578929
+0.0748617 0.760085 0.247021 0.596362
+0.0249895 0.763354 0.207488 0.611241
+-0.0249897 0.763354 0.167067 0.623502
+-0.0748619 0.760085 0.12593 0.633094
+-0.124414 0.753561 0.0842542 0.639975
+-0.173432 0.743811 0.0422175 0.644115
+-0.221709 0.730875 -1.05646e-07 0.645497
+-0.269036 0.71481 -0.0422176 0.644115
+-0.31521 0.695684 -0.0842544 0.639975
+-0.360035 0.673579 -0.12593 0.633094
+-0.403318 0.648589 -0.167067 0.623502
+-0.444875 0.620822 -0.207488 0.611241
+-0.484526 0.590397 -0.247021 0.596362
+-0.522102 0.557443 -0.285496 0.578929
+-0.557443 0.522102 -0.322749 0.559017
+-0.590397 0.484526 -0.358619 0.536711
+-0.620822 0.444875 -0.392954 0.512107
+-0.648589 0.403318 -0.425606 0.485311
+-0.673578 0.360035 -0.456435 0.456436
+-0.695684 0.31521 -0.485311 0.425606
+-0.71481 0.269035 -0.512107 0.392954
+-0.730875 0.221709 -0.536711 0.358619
+-0.743811 0.173432 -0.559017 0.322749
+-0.753561 0.124414 -0.578929 0.285496
+-0.760085 0.0748618 -0.596362 0.247021
+-0.763354 0.0249897 -0.611241 0.207488
+0.763354 0.0249896 0.578929 -0.285496
+0.760085 0.0748618 0.596362 -0.247021
+0.753561 0.124413 0.611241 -0.207488
+0.743811 0.173432 0.623502 -0.167067
+0.730875 0.221709 0.633094 -0.12593
+0.71481 0.269035 0.639975 -0.0842543
+0.695684 0.31521 0.644115 -0.0422175
+0.673578 0.360035 0.645497 1.36267e-08
+0.648589 0.403318 0.644115 0.0422175
+0.620822 0.444875 0.639975 0.0842543
+0.590396 0.484526 0.633094 0.12593
+0.557443 0.522102 0.623502 0.167067
+0.522102 0.557443 0.611241 0.207488
+0.484526 0.590396 0.596362 0.247021
+0.444875 0.620822 0.578929 0.285496
+0.403318 0.648589 0.559017 0.322749
+0.360035 0.673579 0.536711 0.358619
+0.31521 0.695684 0.512107 0.392954
+0.269035 0.71481 0.48531 0.425606
+0.221709 0.730875 0.456435 0.456435
+0.173432 0.743811 0.425606 0.485311
+0.124413 0.753561 0.392954 0.512107
+0.0748617 0.760085 0.358619 0.536711
+0.0249895 0.763354 0.322749 0.559017
+-0.0249897 0.763354 0.285496 0.578929
+-0.0748619 0.760085 0.247021 0.596362
+-0.124414 0.753561 0.207488 0.611241
+-0.173432 0.743811 0.167067 0.623502
+-0.221709 0.730875 0.12593 0.633094
+-0.269036 0.71481 0.0842542 0.639975
+-0.31521 0.695684 0.0422174 0.644115
+-0.360035 0.673579 7.35816e-08 0.645497
+-0.403318 0.648589 -0.0422175 0.644115
+-0.444875 0.620822 -0.0842544 0.639975
+-0.484526 0.590397 -0.12593 0.633094
+-0.522102 0.557443 -0.167067 0.623502
+-0.557443 0.522102 -0.207488 0.611241
+-0.590397 0.484526 -0.247021 0.596362
+-0.620822 0.444875 -0.285496 0.578929
+-0.648589 0.403318 -0.322749 0.559017
+-0.673578 0.360035 -0.358619 0.536711
+-0.695684 0.31521 -0.392954 0.512107
+-0.71481 0.269035 -0.425606 0.48531
+-0.730875 0.221709 -0.456435 0.456435
+-0.743811 0.173432 -0.485311 0.425606
+-0.753561 0.124414 -0.512107 0.392954
+-0.760085 0.0748618 -0.536711 0.358619
+-0.763354 0.0249897 -0.559017 0.322749
+0.790146 0.0258667 0.573123 -0.215708
+0.786763 0.0774894 0.586004 -0.177762
+0.78001 0.12878 0.596375 -0.139055
+0.769917 0.17952 0.604193 -0.0997527
+0.756528 0.22949 0.609424 -0.060023
+0.739899 0.278478 0.612045 -0.0200363
+0.720101 0.326274 0.612045 0.0200363
+0.69722 0.372672 0.609424 0.060023
+0.671353 0.417474 0.604193 0.0997527
+0.642612 0.460489 0.596375 0.139055
+0.611118 0.501532 0.586004 0.177762
+0.577008 0.540427 0.573123 0.215708
+0.540427 0.577008 0.557788 0.25273
+0.501532 0.611118 0.540064 0.28867
+0.460489 0.642612 0.520028 0.323374
+0.417474 0.671353 0.497765 0.356693
+0.372672 0.69722 0.47337 0.388485
+0.326274 0.720101 0.446949 0.418613
+0.278478 0.739899 0.418613 0.446949
+0.22949 0.756528 0.388485 0.47337
+0.17952 0.769917 0.356693 0.497765
+0.12878 0.78001 0.323374 0.520028
+0.0774893 0.786763 0.28867 0.540064
+0.0258666 0.790146 0.25273 0.557788
+-0.0258668 0.790146 0.215708 0.573123
+-0.0774894 0.786763 0.177762 0.586004
+-0.12878 0.78001 0.139055 0.596375
+-0.17952 0.769917 0.0997527 0.604193
+-0.22949 0.756528 0.0600229 0.609424
+-0.278478 0.739899 0.0200362 0.612045
+-0.326274 0.720101 -0.0200363 0.612045
+-0.372672 0.69722 -0.0600229 0.609424
+-0.417474 0.671353 -0.0997527 0.604193
+-0.460489 0.642612 -0.139055 0.596375
+-0.501532 0.611118 -0.177762 0.586004
+-0.540427 0.577008 -0.215708 0.573123
+-0.577008 0.540427 -0.25273 0.557788
+-0.611118 0.501532 -0.28867 0.540064
+-0.642612 0.460489 -0.323374 0.520028
+-0.671353 0.417474 -0.356693 0.497765
+-0.69722 0.372672 -0.388485 0.47337
+-0.720101 0.326274 -0.418613 0.446949
+-0.739899 0.278478 -0.446949 0.418613
+-0.756528 0.22949 -0.47337 0.388485
+-0.769917 0.179519 -0.497765 0.356693
+-0.78001 0.12878 -0.520028 0.323374
+-0.786763 0.0774893 -0.540064 0.28867
+-0.790146 0.0258668 -0.557788 0.25273
+0.735586 0.0240806 0.676641 0.0221509
+0.732436 0.0721387 0.673743 0.0663579
+0.72615 0.119888 0.667961 0.110281
+0.716754 0.167124 0.659318 0.153731
+0.704289 0.213644 0.647852 0.196524
+0.688808 0.259249 0.633611 0.238474
+0.670378 0.303744 0.616658 0.279404
+0.649076 0.346939 0.597064 0.319137
+0.624996 0.388647 0.574913 0.357504
+0.598239 0.428692 0.5503 0.394339
+0.56892 0.466901 0.523331 0.429486
+0.537165 0.50311 0.49412 0.462794
+0.50311 0.537165 0.462794 0.49412
+0.466901 0.56892 0.429486 0.523331
+0.428692 0.598239 0.394339 0.5503
+0.388647 0.624996 0.357504 0.574913
+0.346939 0.649077 0.319137 0.597064
+0.303744 0.670378 0.279404 0.616658
+0.259249 0.688808 0.238474 0.633611
+0.213644 0.704289 0.196524 0.647852
+0.167124 0.716754 0.153731 0.659318
+0.119888 0.72615 0.110281 0.667961
+0.0721386 0.732436 0.0663578 0.673743
+0.0240805 0.735586 0.0221508 0.676641
+-0.0240807 0.735586 -0.022151 0.676641
+-0.0721387 0.732436 -0.066358 0.673743
+-0.119888 0.72615 -0.110281 0.667961
+-0.167124 0.716754 -0.153731 0.659318
+-0.213644 0.704289 -0.196524 0.647852
+-0.259249 0.688808 -0.238475 0.633611
+-0.303744 0.670378 -0.279404 0.616658
+-0.346939 0.649077 -0.319137 0.597064
+-0.388647 0.624996 -0.357504 0.574913
+-0.428692 0.598239 -0.394339 0.5503
+-0.466901 0.56892 -0.429486 0.523331
+-0.50311 0.537165 -0.462794 0.49412
+-0.537165 0.50311 -0.49412 0.462794
+-0.56892 0.466901 -0.523331 0.429486
+-0.598239 0.428692 -0.5503 0.394339
+-0.624996 0.388647 -0.574913 0.357504
+-0.649076 0.346939 -0.597063 0.319137
+-0.670378 0.303744 -0.616658 0.279404
+-0.688808 0.259249 -0.633611 0.238474
+-0.704289 0.213644 -0.647852 0.196524
+-0.716754 0.167124 -0.659318 0.153731
+-0.72615 0.119888 -0.667961 0.110281
+-0.732436 0.0721386 -0.673743 0.0663579
+-0.735586 0.0240807 -0.676641 0.022151
+0.763354 0.0249896 0.639975 0.0842543
+0.760085 0.0748618 0.633094 0.12593
+0.753561 0.124413 0.623502 0.167067
+0.743811 0.173432 0.611241 0.207488
+0.730875 0.221709 0.596362 0.247021
+0.71481 0.269035 0.578929 0.285496
+0.695684 0.31521 0.559017 0.322749
+0.673578 0.360035 0.536711 0.358619
+0.648589 0.403318 0.512107 0.392954
+0.620822 0.444875 0.485311 0.425606
+0.590396 0.484526 0.456435 0.456435
+0.557443 0.522102 0.425606 0.485311
+0.522102 0.557443 0.392954 0.512107
+0.484526 0.590396 0.358619 0.536711
+0.444875 0.620822 0.322749 0.559017
+0.403318 0.648589 0.285496 0.578929
+0.360035 0.673579 0.247021 0.596362
+0.31521 0.695684 0.207488 0.611241
+0.269035 0.71481 0.167067 0.623502
+0.221709 0.730875 0.12593 0.633094
+0.173432 0.743811 0.0842543 0.639975
+0.124413 0.753561 0.0422175 0.644115
+0.0748617 0.760085 -7.93547e-08 0.645497
+0.0249895 0.763354 -0.0422176 0.644115
+-0.0249897 0.763354 -0.0842544 0.639975
+-0.0748619 0.760085 -0.12593 0.633094
+-0.124414 0.753561 -0.167067 0.623502
+-0.173432 0.743811 -0.207488 0.611241
+-0.221709 0.730875 -0.247021 0.596362
+-0.269036 0.71481 -0.285496 0.578929
+-0.31521 0.695684 -0.322749 0.559017
+-0.360035 0.673579 -0.358619 0.536711
+-0.403318 0.648589 -0.392954 0.512107
+-0.444875 0.620822 -0.425606 0.48531
+-0.484526 0.590397 -0.456435 0.456435
+-0.522102 0.557443 -0.485311 0.425606
+-0.557443 0.522102 -0.512107 0.392954
+-0.590397 0.484526 -0.536711 0.358619
+-0.620822 0.444875 -0.559017 0.322749
+-0.648589 0.403318 -0.578929 0.285496
+-0.673578 0.360035 -0.596362 0.247021
+-0.695684 0.31521 -0.611241 0.207488
+-0.71481 0.269035 -0.623502 0.167067
+-0.730875 0.221709 -0.633094 0.12593
+-0.743811 0.173432 -0.639975 0.0842542
+-0.753561 0.124414 -0.644115 0.0422176
+-0.760085 0.0748618 -0.645497 -3.0621e-08
+-0.763354 0.0249897 -0.644115 -0.0422175
+0.763354 0.0249896 0.644115 -0.0422175
+0.760085 0.0748618 0.645497 1.76347e-09
+0.753561 0.124413 0.644115 0.0422175
+0.743811 0.173432 0.639975 0.0842543
+0.730875 0.221709 0.633094 0.12593
+0.71481 0.269035 0.623502 0.167067
+0.695684 0.31521 0.611241 0.207488
+0.673578 0.360035 0.596362 0.247021
+0.648589 0.403318 0.578929 0.285496
+0.620822 0.444875 0.559017 0.322749
+0.590396 0.484526 0.536711 0.358619
+0.557443 0.522102 0.512107 0.392954
+0.522102 0.557443 0.485311 0.425606
+0.484526 0.590396 0.456435 0.456435
+0.444875 0.620822 0.425606 0.485311
+0.403318 0.648589 0.392954 0.512107
+0.360035 0.673579 0.358619 0.536711
+0.31521 0.695684 0.322749 0.559017
+0.269035 0.71481 0.285496 0.578929
+0.221709 0.730875 0.247021 0.596362
+0.173432 0.743811 0.207488 0.611241
+0.124413 0.753561 0.167067 0.623502
+0.0748617 0.760085 0.12593 0.633094
+0.0249895 0.763354 0.0842542 0.639975
+-0.0249897 0.763354 0.0422175 0.644115
+-0.0748619 0.760085 -5.40257e-08 0.645497
+-0.124414 0.753561 -0.0422176 0.644115
+-0.173432 0.743811 -0.0842543 0.639975
+-0.221709 0.730875 -0.12593 0.633094
+-0.269036 0.71481 -0.167067 0.623502
+-0.31521 0.695684 -0.207488 0.611241
+-0.360035 0.673579 -0.247021 0.596362
+-0.403318 0.648589 -0.285496 0.578929
+-0.444875 0.620822 -0.322749 0.559017
+-0.484526 0.590397 -0.358619 0.536711
+-0.522102 0.557443 -0.392954 0.512107
+-0.557443 0.522102 -0.425606 0.485311
+-0.590397 0.484526 -0.456435 0.456435
+-0.620822 0.444875 -0.48531 0.425606
+-0.648589 0.403318 -0.512107 0.392954
+-0.673578 0.360035 -0.536711 0.358619
+-0.695684 0.31521 -0.559017 0.322749
+-0.71481 0.269035 -0.578929 0.285496
+-0.730875 0.221709 -0.596362 0.247021
+-0.743811 0.173432 -0.611241 0.207488
+-0.753561 0.124414 -0.623502 0.167067
+-0.760085 0.0748618 -0.633094 0.12593
+-0.763354 0.0249897 -0.639975 0.0842543
+0.790146 0.0258667 0.612045 0.0200363
+0.786763 0.0774894 0.609424 0.060023
+0.78001 0.12878 0.604193 0.0997527
+0.769917 0.17952 0.596375 0.139055
+0.756528 0.22949 0.586004 0.177762
+0.739899 0.278478 0.573123 0.215708
+0.720101 0.326274 0.557788 0.25273
+0.69722 0.372672 0.540064 0.28867
+0.671353 0.417474 0.520028 0.323374
+0.642612 0.460489 0.497765 0.356693
+0.611118 0.501532 0.47337 0.388485
+0.577008 0.540427 0.446949 0.418613
+0.540427 0.577008 0.418613 0.446949
+0.501532 0.611118 0.388485 0.47337
+0.460489 0.642612 0.356693 0.497765
+0.417474 0.671353 0.323374 0.520028
+0.372672 0.69722 0.28867 0.540064
+0.326274 0.720101 0.25273 0.557788
+0.278478 0.739899 0.215708 0.573123
+0.22949 0.756528 0.177762 0.586004
+0.17952 0.769917 0.139055 0.596375
+0.12878 0.78001 0.0997526 0.604193
+0.0774893 0.786763 0.0600229 0.609424
+0.0258666 0.790146 0.0200362 0.612045
+-0.0258668 0.790146 -0.0200363 0.612045
+-0.0774894 0.786763 -0.060023 0.609424
+-0.12878 0.78001 -0.0997527 0.604193
+-0.17952 0.769917 -0.139055 0.596375
+-0.22949 0.756528 -0.177762 0.586004
+-0.278478 0.739899 -0.215708 0.573123
+-0.326274 0.720101 -0.25273 0.557788
+-0.372672 0.69722 -0.28867 0.540064
+-0.417474 0.671353 -0.323374 0.520028
+-0.460489 0.642612 -0.356693 0.497765
+-0.501532 0.611118 -0.388485 0.47337
+-0.540427 0.577008 -0.418613 0.446949
+-0.577008 0.540427 -0.446949 0.418613
+-0.611118 0.501532 -0.47337 0.388485
+-0.642612 0.460489 -0.497765 0.356693
+-0.671353 0.417474 -0.520028 0.323374
+-0.69722 0.372672 -0.540064 0.28867
+-0.720101 0.326274 -0.557788 0.25273
+-0.739899 0.278478 -0.573123 0.215708
+-0.756528 0.22949 -0.586004 0.177762
+-0.769917 0.179519 -0.596375 0.139055
+-0.78001 0.12878 -0.604193 0.0997527
+-0.786763 0.0774893 -0.609424 0.060023
+-0.790146 0.0258668 -0.612045 0.0200363
+0.790146 0.0258667 0.596375 0.139055
+0.786763 0.0774894 0.586004 0.177762
+0.78001 0.12878 0.573123 0.215708
+0.769917 0.17952 0.557788 0.25273
+0.756528 0.22949 0.540064 0.28867
+0.739899 0.278478 0.520028 0.323374
+0.720101 0.326274 0.497765 0.356693
+0.69722 0.372672 0.47337 0.388485
+0.671353 0.417474 0.446949 0.418613
+0.642612 0.460489 0.418613 0.446949
+0.611118 0.501532 0.388485 0.47337
+0.577008 0.540427 0.356693 0.497765
+0.540427 0.577008 0.323374 0.520028
+0.501532 0.611118 0.28867 0.540064
+0.460489 0.642612 0.25273 0.557788
+0.417474 0.671353 0.215708 0.573123
+0.372672 0.69722 0.177762 0.586004
+0.326274 0.720101 0.139055 0.596375
+0.278478 0.739899 0.0997527 0.604193
+0.22949 0.756528 0.060023 0.609424
+0.17952 0.769917 0.0200362 0.612045
+0.12878 0.78001 -0.0200363 0.612045
+0.0774893 0.786763 -0.0600231 0.609424
+0.0258666 0.790146 -0.0997528 0.604193
+-0.0258668 0.790146 -0.139055 0.596375
+-0.0774894 0.786763 -0.177762 0.586004
+-0.12878 0.78001 -0.215708 0.573123
+-0.17952 0.769917 -0.25273 0.557788
+-0.22949 0.756528 -0.28867 0.540064
+-0.278478 0.739899 -0.323374 0.520028
+-0.326274 0.720101 -0.356693 0.497765
+-0.372672 0.69722 -0.388485 0.47337
+-0.417474 0.671353 -0.418613 0.446949
+-0.460489 0.642612 -0.446949 0.418613
+-0.501532 0.611118 -0.47337 0.388485
+-0.540427 0.577008 -0.497765 0.356693
+-0.577008 0.540427 -0.520028 0.323374
+-0.611118 0.501532 -0.540064 0.28867
+-0.642612 0.460489 -0.557788 0.25273
+-0.671353 0.417474 -0.573123 0.215708
+-0.69722 0.372672 -0.586004 0.177762
+-0.720101 0.326274 -0.596375 0.139055
+-0.739899 0.278478 -0.604193 0.0997526
+-0.756528 0.22949 -0.609424 0.060023
+-0.769917 0.179519 -0.612045 0.0200362
+-0.78001 0.12878 -0.612045 -0.0200362
+-0.786763 0.0774893 -0.609424 -0.060023
+-0.790146 0.0258668 -0.604193 -0.0997526
+0.816059 0.026715 0.54671 0.185583
+0.812565 0.0800307 0.533402 0.220942
+0.805591 0.133004 0.51781 0.255355
+0.795167 0.185407 0.5 0.288675
+0.781339 0.237016 0.480049 0.320759
+0.764164 0.287611 0.458043 0.351469
+0.743717 0.336974 0.434075 0.380673
+0.720086 0.384894 0.408248 0.408248
+0.693371 0.431166 0.380673 0.434075
+0.663687 0.475591 0.351469 0.458043
+0.63116 0.51798 0.320759 0.480049
+0.595932 0.558151 0.288675 0.5
+0.558151 0.595932 0.255355 0.51781
+0.51798 0.63116 0.220942 0.533402
+0.475591 0.663687 0.185583 0.54671
+0.431166 0.693371 0.149429 0.557678
+0.384894 0.720086 0.112635 0.566257
+0.336974 0.743717 0.0753593 0.572411
+0.287611 0.764164 0.0377605 0.576114
+0.237016 0.781339 -2.48065e-08 0.57735
+0.185407 0.795167 -0.0377605 0.576114
+0.133003 0.805591 -0.0753594 0.572411
+0.0800306 0.812565 -0.112636 0.566257
+0.0267149 0.816059 -0.149429 0.557678
+-0.0267151 0.816059 -0.185583 0.54671
+-0.0800307 0.812565 -0.220942 0.533402
+-0.133004 0.805591 -0.255356 0.51781
+-0.185407 0.795167 -0.288675 0.5
+-0.237017 0.781338 -0.320759 0.480049
+-0.287611 0.764164 -0.351469 0.458043
+-0.336974 0.743717 -0.380674 0.434075
+-0.384894 0.720086 -0.408248 0.408248
+-0.431166 0.693371 -0.434075 0.380673
+-0.475591 0.663686 -0.458043 0.351469
+-0.51798 0.63116 -0.480049 0.320759
+-0.558151 0.595931 -0.5 0.288675
+-0.595931 0.558151 -0.51781 0.255356
+-0.63116 0.51798 -0.533402 0.220942
+-0.663686 0.475591 -0.54671 0.185583
+-0.693371 0.431166 -0.557678 0.149429
+-0.720086 0.384894 -0.566257 0.112636
+-0.743717 0.336974 -0.572411 0.0753593
+-0.764164 0.287611 -0.576114 0.0377605
+-0.781339 0.237016 -0.57735 1.87823e-08
+-0.795167 0.185407 -0.576114 -0.0377606
+-0.805591 0.133004 -0.572411 -0.0753593
+-0.812565 0.0800306 -0.566257 -0.112635
+-0.816059 0.0267151 -0.557678 -0.149429
+0.816059 0.026715 0.572411 0.0753593
+0.812565 0.0800307 0.566257 0.112635
+0.805591 0.133004 0.557678 0.149429
+0.795167 0.185407 0.54671 0.185583
+0.781339 0.237016 0.533402 0.220942
+0.764164 0.287611 0.51781 0.255355
+0.743717 0.336974 0.5 0.288675
+0.720086 0.384894 0.480049 0.320759
+0.693371 0.431166 0.458043 0.351469
+0.663687 0.475591 0.434075 0.380673
+0.63116 0.51798 0.408248 0.408248
+0.595932 0.558151 0.380673 0.434075
+0.558151 0.595932 0.351469 0.458043
+0.51798 0.63116 0.320759 0.480049
+0.475591 0.663687 0.288675 0.5
+0.431166 0.693371 0.255355 0.51781
+0.384894 0.720086 0.220942 0.533402
+0.336974 0.743717 0.185583 0.54671
+0.287611 0.764164 0.149429 0.557678
+0.237016 0.781339 0.112635 0.566257
+0.185407 0.795167 0.0753593 0.572411
+0.133003 0.805591 0.0377604 0.576114
+0.0800306 0.812565 -7.0977e-08 0.57735
+0.0267149 0.816059 -0.0377606 0.576114
+-0.0267151 0.816059 -0.0753594 0.572411
+-0.0800307 0.812565 -0.112635 0.566257
+-0.133004 0.805591 -0.149429 0.557678
+-0.185407 0.795167 -0.185583 0.54671
+-0.237017 0.781338 -0.220942 0.533402
+-0.287611 0.764164 -0.255356 0.51781
+-0.336974 0.743717 -0.288675 0.5
+-0.384894 0.720086 -0.320759 0.480049
+-0.431166 0.693371 -0.351469 0.458043
+-0.475591 0.663686 -0.380674 0.434075
+-0.51798 0.63116 -0.408248 0.408248
+-0.558151 0.595931 -0.434075 0.380673
+-0.595931 0.558151 -0.458043 0.351469
+-0.63116 0.51798 -0.480049 0.320759
+-0.663686 0.475591 -0.5 0.288675
+-0.693371 0.431166 -0.51781 0.255355
+-0.720086 0.384894 -0.533402 0.220942
+-0.743717 0.336974 -0.54671 0.185583
+-0.764164 0.287611 -0.557678 0.149429
+-0.781339 0.237016 -0.566257 0.112635
+-0.795167 0.185407 -0.572411 0.0753593
+-0.805591 0.133004 -0.576114 0.0377605
+-0.812565 0.0800306 -0.57735 -2.73883e-08
+-0.816059 0.0267151 -0.576114 -0.0377605
+0.841175 0.0275372 0.525954 0.122635
+0.837573 0.0824937 0.516807 0.156772
+0.830384 0.137097 0.505447 0.190237
+0.81964 0.191113 0.491923 0.222887
+0.805385 0.244311 0.476292 0.254583
+0.787682 0.296463 0.458622 0.285189
+0.766606 0.347345 0.438987 0.314574
+0.742247 0.396739 0.417473 0.342612
+0.71471 0.444435 0.394172 0.369182
+0.684112 0.490228 0.369182 0.394172
+0.650585 0.533921 0.342612 0.417473
+0.614272 0.575329 0.314574 0.438987
+0.575329 0.614272 0.285189 0.458622
+0.533922 0.650585 0.254583 0.476292
+0.490228 0.684112 0.222887 0.491923
+0.444435 0.71471 0.190237 0.505447
+0.396739 0.742247 0.156772 0.516807
+0.347345 0.766606 0.122635 0.525954
+0.296463 0.787682 0.0879736 0.532848
+0.244311 0.805385 0.0529353 0.537461
+0.191113 0.81964 0.0176703 0.539773
+0.137097 0.830384 -0.0176704 0.539773
+0.0824936 0.837573 -0.0529354 0.537461
+0.0275371 0.841175 -0.0879737 0.532848
+-0.0275373 0.841175 -0.122635 0.525954
+-0.0824938 0.837573 -0.156772 0.516807
+-0.137097 0.830384 -0.190237 0.505447
+-0.191113 0.81964 -0.222887 0.491923
+-0.244311 0.805385 -0.254583 0.476292
+-0.296463 0.787682 -0.285189 0.458622
+-0.347345 0.766606 -0.314574 0.438987
+-0.396739 0.742247 -0.342611 0.417473
+-0.444435 0.71471 -0.369182 0.394172
+-0.490228 0.684112 -0.394172 0.369182
+-0.533921 0.650585 -0.417473 0.342612
+-0.575329 0.614272 -0.438987 0.314574
+-0.614272 0.575329 -0.458622 0.285189
+-0.650585 0.533921 -0.476292 0.254583
+-0.684112 0.490228 -0.491923 0.222887
+-0.71471 0.444435 -0.505447 0.190237
+-0.742247 0.39674 -0.516807 0.156772
+-0.766606 0.347345 -0.525954 0.122635
+-0.787682 0.296463 -0.532848 0.0879736
+-0.805385 0.244311 -0.537461 0.0529353
+-0.81964 0.191113 -0.539773 0.0176703
+-0.830384 0.137097 -0.539773 -0.0176703
+-0.837573 0.0824937 -0.537461 -0.0529353
+-0.841175 0.0275373 -0.532848 -0.0879736
+0.790146 0.0258667 0.604193 -0.0997527
+0.786763 0.0774894 0.609424 -0.060023
+0.78001 0.12878 0.612045 -0.0200363
+0.769917 0.17952 0.612045 0.0200363
+0.756528 0.22949 0.609424 0.060023
+0.739899 0.278478 0.604193 0.0997527
+0.720101 0.326274 0.596375 0.139055
+0.69722 0.372672 0.586004 0.177762
+0.671353 0.417474 0.573123 0.215708
+0.642612 0.460489 0.557788 0.25273
+0.611118 0.501532 0.540064 0.28867
+0.577008 0.540427 0.520028 0.323374
+0.540427 0.577008 0.497765 0.356693
+0.501532 0.611118 0.47337 0.388485
+0.460489 0.642612 0.446949 0.418613
+0.417474 0.671353 0.418613 0.446949
+0.372672 0.69722 0.388485 0.47337
+0.326274 0.720101 0.356693 0.497765
+0.278478 0.739899 0.323374 0.520028
+0.22949 0.756528 0.28867 0.540064
+0.17952 0.769917 0.25273 0.557788
+0.12878 0.78001 0.215708 0.573123
+0.0774893 0.786763 0.177762 0.586004
+0.0258666 0.790146 0.139055 0.596375
+-0.0258668 0.790146 0.0997526 0.604193
+-0.0774894 0.786763 0.0600229 0.609424
+-0.12878 0.78001 0.0200362 0.612045
+-0.17952 0.769917 -0.0200363 0.612045
+-0.22949 0.756528 -0.0600231 0.609424
+-0.278478 0.739899 -0.0997528 0.604193
+-0.326274 0.720101 -0.139055 0.596375
+-0.372672 0.69722 -0.177762 0.586004
+-0.417474 0.671353 -0.215708 0.573123
+-0.460489 0.642612 -0.25273 0.557788
+-0.501532 0.611118 -0.28867 0.540064
+-0.540427 0.577008 -0.323374 0.520028
+-0.577008 0.540427 -0.356693 0.497765
+-0.611118 0.501532 -0.388485 0.47337
+-0.642612 0.460489 -0.418613 0.446949
+-0.671353 0.417474 -0.446949 0.418613
+-0.69722 0.372672 -0.47337 0.388485
+-0.720101 0.326274 -0.497765 0.356693
+-0.739899 0.278478 -0.520028 0.323374
+-0.756528 0.22949 -0.540064 0.28867
+-0.769917 0.179519 -0.557788 0.25273
+-0.78001 0.12878 -0.573123 0.215708
+-0.786763 0.0774893 -0.586004 0.177762
+-0.790146 0.0258668 -0.596375 0.139055
+0.816059 0.026715 0.576114 -0.0377605
+0.812565 0.0800307 0.57735 1.5773e-09
+0.805591 0.133004 0.576114 0.0377605
+0.795167 0.185407 0.572411 0.0753593
+0.781339 0.237016 0.566257 0.112635
+0.764164 0.287611 0.557678 0.149429
+0.743717 0.336974 0.54671 0.185583
+0.720086 0.384894 0.533402 0.220942
+0.693371 0.431166 0.51781 0.255355
+0.663687 0.475591 0.5 0.288675
+0.63116 0.51798 0.480049 0.320759
+0.595932 0.558151 0.458043 0.351469
+0.558151 0.595932 0.434075 0.380673
+0.51798 0.63116 0.408248 0.408248
+0.475591 0.663687 0.380673 0.434075
+0.431166 0.693371 0.351469 0.458043
+0.384894 0.720086 0.320759 0.480049
+0.336974 0.743717 0.288675 0.5
+0.287611 0.764164 0.255355 0.51781
+0.237016 0.781339 0.220942 0.533402
+0.185407 0.795167 0.185583 0.54671
+0.133003 0.805591 0.149429 0.557678
+0.0800306 0.812565 0.112635 0.566257
+0.0267149 0.816059 0.0753593 0.572411
+-0.0267151 0.816059 0.0377605 0.576114
+-0.0800307 0.812565 -4.83221e-08 0.57735
+-0.133004 0.805591 -0.0377606 0.576114
+-0.185407 0.795167 -0.0753594 0.572411
+-0.237017 0.781338 -0.112636 0.566257
+-0.287611 0.764164 -0.149429 0.557678
+-0.336974 0.743717 -0.185583 0.54671
+-0.384894 0.720086 -0.220942 0.533402
+-0.431166 0.693371 -0.255355 0.51781
+-0.475591 0.663686 -0.288675 0.5
+-0.51798 0.63116 -0.320759 0.480049
+-0.558151 0.595931 -0.351469 0.458043
+-0.595931 0.558151 -0.380673 0.434075
+-0.63116 0.51798 -0.408248 0.408248
+-0.663686 0.475591 -0.434075 0.380674
+-0.693371 0.431166 -0.458043 0.351469
+-0.720086 0.384894 -0.480049 0.320759
+-0.743717 0.336974 -0.5 0.288675
+-0.764164 0.287611 -0.51781 0.255355
+-0.781339 0.237016 -0.533402 0.220942
+-0.795167 0.185407 -0.54671 0.185583
+-0.805591 0.133004 -0.557678 0.149429
+-0.812565 0.0800306 -0.566257 0.112635
+-0.816059 0.0267151 -0.572411 0.0753594
+0.816059 0.026715 0.557678 -0.149429
+0.812565 0.0800307 0.566257 -0.112635
+0.805591 0.133004 0.572411 -0.0753593
+0.795167 0.185407 0.576114 -0.0377605
+0.781339 0.237016 0.57735 4.30302e-10
+0.764164 0.287611 0.576114 0.0377605
+0.743717 0.336974 0.572411 0.0753593
+0.720086 0.384894 0.566257 0.112635
+0.693371 0.431166 0.557678 0.149429
+0.663687 0.475591 0.54671 0.185583
+0.63116 0.51798 0.533402 0.220942
+0.595932 0.558151 0.51781 0.255356
+0.558151 0.595932 0.5 0.288675
+0.51798 0.63116 0.480049 0.320759
+0.475591 0.663687 0.458043 0.351469
+0.431166 0.693371 0.434075 0.380674
+0.384894 0.720086 0.408248 0.408248
+0.336974 0.743717 0.380673 0.434075
+0.287611 0.764164 0.351469 0.458043
+0.237016 0.781339 0.320759 0.480049
+0.185407 0.795167 0.288675 0.5
+0.133003 0.805591 0.255355 0.51781
+0.0800306 0.812565 0.220942 0.533402
+0.0267149 0.816059 0.185583 0.54671
+-0.0267151 0.816059 0.149429 0.557678
+-0.0800307 0.812565 0.112635 0.566257
+-0.133004 0.805591 0.0753593 0.572411
+-0.185407 0.795167 0.0377605 0.576114
+-0.237017 0.781338 -9.44926e-08 0.57735
+-0.287611 0.764164 -0.0377606 0.576114
+-0.336974 0.743717 -0.0753594 0.572411
+-0.384894 0.720086 -0.112635 0.566257
+-0.431166 0.693371 -0.149429 0.557678
+-0.475591 0.663686 -0.185583 0.54671
+-0.51798 0.63116 -0.220942 0.533402
+-0.558151 0.595931 -0.255356 0.51781
+-0.595931 0.558151 -0.288675 0.5
+-0.63116 0.51798 -0.320759 0.480049
+-0.663686 0.475591 -0.351469 0.458043
+-0.693371 0.431166 -0.380673 0.434075
+-0.720086 0.384894 -0.408248 0.408248
+-0.743717 0.336974 -0.434075 0.380673
+-0.764164 0.287611 -0.458043 0.351469
+-0.781339 0.237016 -0.480049 0.320759
+-0.795167 0.185407 -0.5 0.288675
+-0.805591 0.133004 -0.51781 0.255356
+-0.812565 0.0800306 -0.533402 0.220942
+-0.816059 0.0267151 -0.54671 0.185583
+0.841175 0.0275372 0.532848 -0.0879736
+0.837573 0.0824937 0.537461 -0.0529353
+0.830384 0.137097 0.539773 -0.0176703
+0.81964 0.191113 0.539773 0.0176703
+0.805385 0.244311 0.537461 0.0529353
+0.787682 0.296463 0.532848 0.0879736
+0.766606 0.347345 0.525954 0.122635
+0.742247 0.396739 0.516807 0.156772
+0.71471 0.444435 0.505447 0.190237
+0.684112 0.490228 0.491923 0.222887
+0.650585 0.533921 0.476292 0.254583
+0.614272 0.575329 0.458622 0.285189
+0.575329 0.614272 0.438987 0.314574
+0.533922 0.650585 0.417473 0.342612
+0.490228 0.684112 0.394172 0.369182
+0.444435 0.71471 0.369182 0.394172
+0.396739 0.742247 0.342611 0.417473
+0.347345 0.766606 0.314574 0.438987
+0.296463 0.787682 0.285189 0.458622
+0.244311 0.805385 0.254583 0.476292
+0.191113 0.81964 0.222887 0.491923
+0.137097 0.830384 0.190237 0.505447
+0.0824936 0.837573 0.156772 0.516807
+0.0275371 0.841175 0.122635 0.525954
+-0.0275373 0.841175 0.0879736 0.532848
+-0.0824938 0.837573 0.0529353 0.537461
+-0.137097 0.830384 0.0176703 0.539773
+-0.191113 0.81964 -0.0176704 0.539773
+-0.244311 0.805385 -0.0529354 0.537461
+-0.296463 0.787682 -0.0879737 0.532848
+-0.347345 0.766606 -0.122635 0.525954
+-0.396739 0.742247 -0.156772 0.516807
+-0.444435 0.71471 -0.190237 0.505447
+-0.490228 0.684112 -0.222887 0.491923
+-0.533921 0.650585 -0.254583 0.476292
+-0.575329 0.614272 -0.285189 0.458622
+-0.614272 0.575329 -0.314574 0.438987
+-0.650585 0.533921 -0.342612 0.417473
+-0.684112 0.490228 -0.369182 0.394172
+-0.71471 0.444435 -0.394172 0.369182
+-0.742247 0.39674 -0.417473 0.342612
+-0.766606 0.347345 -0.438987 0.314574
+-0.787682 0.296463 -0.458622 0.285189
+-0.805385 0.244311 -0.476292 0.254583
+-0.81964 0.191113 -0.491923 0.222887
+-0.830384 0.137097 -0.505447 0.190237
+-0.837573 0.0824937 -0.516807 0.156772
+-0.841175 0.0275373 -0.525954 0.122635
+0.841175 0.0275372 0.539773 0.0176703
+0.837573 0.0824937 0.537461 0.0529353
+0.830384 0.137097 0.532848 0.0879736
+0.81964 0.191113 0.525954 0.122635
+0.805385 0.244311 0.516807 0.156772
+0.787682 0.296463 0.505447 0.190237
+0.766606 0.347345 0.491923 0.222887
+0.742247 0.396739 0.476292 0.254583
+0.71471 0.444435 0.458622 0.285189
+0.684112 0.490228 0.438987 0.314574
+0.650585 0.533921 0.417473 0.342612
+0.614272 0.575329 0.394172 0.369182
+0.575329 0.614272 0.369182 0.394172
+0.533922 0.650585 0.342612 0.417473
+0.490228 0.684112 0.314574 0.438987
+0.444435 0.71471 0.285189 0.458622
+0.396739 0.742247 0.254583 0.476292
+0.347345 0.766606 0.222887 0.491923
+0.296463 0.787682 0.190237 0.505447
+0.244311 0.805385 0.156772 0.516807
+0.191113 0.81964 0.122635 0.525954
+0.137097 0.830384 0.0879735 0.532848
+0.0824936 0.837573 0.0529352 0.537461
+0.0275371 0.841175 0.0176703 0.539773
+-0.0275373 0.841175 -0.0176704 0.539773
+-0.0824938 0.837573 -0.0529354 0.537461
+-0.137097 0.830384 -0.0879736 0.532848
+-0.191113 0.81964 -0.122635 0.525954
+-0.244311 0.805385 -0.156772 0.516807
+-0.296463 0.787682 -0.190237 0.505447
+-0.347345 0.766606 -0.222887 0.491923
+-0.396739 0.742247 -0.254583 0.476292
+-0.444435 0.71471 -0.285189 0.458622
+-0.490228 0.684112 -0.314574 0.438987
+-0.533921 0.650585 -0.342612 0.417473
+-0.575329 0.614272 -0.369182 0.394172
+-0.614272 0.575329 -0.394172 0.369182
+-0.650585 0.533921 -0.417473 0.342612
+-0.684112 0.490228 -0.438987 0.314574
+-0.71471 0.444435 -0.458622 0.285189
+-0.742247 0.39674 -0.476292 0.254583
+-0.766606 0.347345 -0.491923 0.222887
+-0.787682 0.296463 -0.505447 0.190237
+-0.805385 0.244311 -0.516807 0.156772
+-0.81964 0.191113 -0.525954 0.122635
+-0.830384 0.137097 -0.532848 0.0879736
+-0.837573 0.0824937 -0.537461 0.0529353
+-0.841175 0.0275373 -0.539773 0.0176704
+0.865562 0.0283356 0.495722 0.0652631
+0.861855 0.0848853 0.490393 0.0975452
+0.854458 0.141072 0.482963 0.12941
+0.843402 0.196654 0.473465 0.16072
+0.828735 0.251394 0.46194 0.191342
+0.810518 0.305057 0.448436 0.221144
+0.788831 0.357415 0.433013 0.25
+0.763766 0.408242 0.415735 0.277785
+0.735431 0.45732 0.396677 0.304381
+0.703946 0.50444 0.37592 0.329673
+0.669447 0.549401 0.353553 0.353553
+0.632081 0.592008 0.329673 0.37592
+0.592008 0.632081 0.304381 0.396677
+0.549401 0.669447 0.277785 0.415735
+0.50444 0.703946 0.25 0.433013
+0.45732 0.735431 0.221144 0.448436
+0.408241 0.763766 0.191342 0.46194
+0.357415 0.788831 0.16072 0.473465
+0.305057 0.810518 0.129409 0.482963
+0.251394 0.828735 0.0975451 0.490393
+0.196654 0.843402 0.0652631 0.495722
+0.141072 0.854458 0.0327015 0.498929
+0.0848852 0.861855 -6.14679e-08 0.5
+0.0283355 0.865562 -0.0327016 0.498929
+-0.0283356 0.865562 -0.0652631 0.495722
+-0.0848854 0.861855 -0.0975452 0.490393
+-0.141072 0.854458 -0.12941 0.482963
+-0.196654 0.843402 -0.16072 0.473465
+-0.251394 0.828735 -0.191342 0.46194
+-0.305058 0.810518 -0.221144 0.448436
+-0.357415 0.788831 -0.25 0.433013
+-0.408241 0.763766 -0.277785 0.415735
+-0.45732 0.735431 -0.304381 0.396677
+-0.504441 0.703946 -0.329673 0.37592
+-0.549401 0.669447 -0.353553 0.353553
+-0.592008 0.632081 -0.37592 0.329673
+-0.632081 0.592008 -0.396677 0.304381
+-0.669447 0.549401 -0.415735 0.277785
+-0.703946 0.504441 -0.433013 0.25
+-0.735431 0.45732 -0.448436 0.221144
+-0.763766 0.408242 -0.46194 0.191342
+-0.788831 0.357415 -0.473465 0.16072
+-0.810518 0.305057 -0.482963 0.129409
+-0.828735 0.251394 -0.490393 0.0975452
+-0.843402 0.196654 -0.495722 0.0652631
+-0.854458 0.141072 -0.498929 0.0327016
+-0.861855 0.0848853 -0.5 -2.3719e-08
+-0.865562 0.0283356 -0.498929 -0.0327015
+0.865562 0.0283356 0.498929 -0.0327016
+0.861855 0.0848853 0.5 1.36598e-09
+0.854458 0.141072 0.498929 0.0327016
+0.843402 0.196654 0.495722 0.0652631
+0.828735 0.251394 0.490393 0.0975452
+0.810518 0.305057 0.482963 0.12941
+0.788831 0.357415 0.473465 0.16072
+0.763766 0.408242 0.46194 0.191342
+0.735431 0.45732 0.448436 0.221144
+0.703946 0.50444 0.433013 0.25
+0.669447 0.549401 0.415735 0.277785
+0.632081 0.592008 0.396677 0.304381
+0.592008 0.632081 0.37592 0.329673
+0.549401 0.669447 0.353553 0.353553
+0.50444 0.703946 0.329673 0.37592
+0.45732 0.735431 0.304381 0.396677
+0.408241 0.763766 0.277785 0.415735
+0.357415 0.788831 0.25 0.433013
+0.305057 0.810518 0.221144 0.448436
+0.251394 0.828735 0.191342 0.46194
+0.196654 0.843402 0.16072 0.473465
+0.141072 0.854458 0.129409 0.482963
+0.0848852 0.861855 0.0975451 0.490393
+0.0283355 0.865562 0.065263 0.495722
+-0.0283356 0.865562 0.0327015 0.498929
+-0.0848854 0.861855 -4.18481e-08 0.5
+-0.141072 0.854458 -0.0327016 0.498929
+-0.196654 0.843402 -0.0652631 0.495722
+-0.251394 0.828735 -0.0975452 0.490393
+-0.305058 0.810518 -0.12941 0.482963
+-0.357415 0.788831 -0.16072 0.473465
+-0.408241 0.763766 -0.191342 0.46194
+-0.45732 0.735431 -0.221144 0.448436
+-0.504441 0.703946 -0.25 0.433013
+-0.549401 0.669447 -0.277785 0.415735
+-0.592008 0.632081 -0.304381 0.396677
+-0.632081 0.592008 -0.329673 0.37592
+-0.669447 0.549401 -0.353553 0.353553
+-0.703946 0.504441 -0.37592 0.329673
+-0.735431 0.45732 -0.396677 0.304381
+-0.763766 0.408242 -0.415735 0.277785
+-0.788831 0.357415 -0.433013 0.25
+-0.810518 0.305057 -0.448436 0.221144
+-0.828735 0.251394 -0.46194 0.191342
+-0.843402 0.196654 -0.473465 0.16072
+-0.854458 0.141072 -0.482963 0.12941
+-0.861855 0.0848853 -0.490393 0.0975451
+-0.865562 0.0283356 -0.495722 0.0652631
+0.88928 0.029112 0.456191 0.0149342
+0.885472 0.0872114 0.454238 0.0447385
+0.877872 0.144937 0.450339 0.0743513
+0.866513 0.202043 0.444512 0.103646
+0.851444 0.258283 0.436782 0.132496
+0.832728 0.313417 0.427181 0.160779
+0.810447 0.367209 0.415751 0.188374
+0.784695 0.419428 0.40254 0.215162
+0.755583 0.469852 0.387606 0.241029
+0.723236 0.518263 0.371012 0.265863
+0.687791 0.564456 0.352829 0.28956
+0.649401 0.608231 0.333136 0.312016
+0.608231 0.649401 0.312016 0.333136
+0.564456 0.687791 0.28956 0.352829
+0.518263 0.723236 0.265863 0.371012
+0.469852 0.755583 0.241029 0.387606
+0.419428 0.784695 0.215162 0.40254
+0.367209 0.810447 0.188374 0.415751
+0.313417 0.832728 0.160779 0.427181
+0.258283 0.851444 0.132496 0.436782
+0.202043 0.866513 0.103646 0.444512
+0.144937 0.877872 0.0743512 0.450339
+0.0872113 0.885472 0.0447384 0.454238
+0.0291119 0.88928 0.0149341 0.456191
+-0.0291121 0.88928 -0.0149342 0.456191
+-0.0872115 0.885472 -0.0447385 0.454238
+-0.144937 0.877872 -0.0743513 0.450339
+-0.202043 0.866513 -0.103646 0.444512
+-0.258283 0.851444 -0.132496 0.436781
+-0.313417 0.832728 -0.160779 0.427181
+-0.367209 0.810447 -0.188374 0.415751
+-0.419428 0.784695 -0.215162 0.40254
+-0.469852 0.755583 -0.241029 0.387606
+-0.518263 0.723236 -0.265864 0.371012
+-0.564456 0.687791 -0.28956 0.352829
+-0.608231 0.649401 -0.312016 0.333136
+-0.649401 0.608231 -0.333136 0.312016
+-0.687791 0.564456 -0.352829 0.28956
+-0.723236 0.518263 -0.371012 0.265864
+-0.755583 0.469852 -0.387606 0.241029
+-0.784695 0.419428 -0.40254 0.215162
+-0.810447 0.367209 -0.415751 0.188374
+-0.832728 0.313417 -0.427181 0.160779
+-0.851444 0.258283 -0.436782 0.132496
+-0.866513 0.202043 -0.444512 0.103646
+-0.877872 0.144937 -0.450339 0.0743513
+-0.885472 0.0872113 -0.454238 0.0447385
+-0.88928 0.0291121 -0.456191 0.0149342
+0.456191 0.0149342 -0.029112 0.88928
+0.454238 0.0447385 -0.0872114 0.885472
+0.450339 0.0743513 -0.144937 0.877872
+0.444512 0.103646 -0.202043 0.866513
+0.436782 0.132496 -0.258283 0.851444
+0.427181 0.160779 -0.313417 0.832728
+0.415751 0.188374 -0.367209 0.810447
+0.40254 0.215162 -0.419428 0.784695
+0.387606 0.241029 -0.469852 0.755583
+0.371012 0.265863 -0.518263 0.723236
+0.352829 0.28956 -0.564456 0.687791
+0.333136 0.312016 -0.608231 0.649401
+0.312016 0.333136 -0.649401 0.608231
+0.28956 0.352829 -0.687791 0.564456
+0.265863 0.371012 -0.723236 0.518263
+0.241029 0.387606 -0.755583 0.469852
+0.215162 0.40254 -0.784695 0.419428
+0.188374 0.415751 -0.810447 0.367209
+0.160779 0.427181 -0.832728 0.313417
+0.132496 0.436782 -0.851444 0.258283
+0.103646 0.444512 -0.866513 0.202043
+0.0743512 0.450339 -0.877872 0.144937
+0.0447384 0.454238 -0.885472 0.0872113
+0.0149341 0.456191 -0.88928 0.0291119
+-0.0149342 0.456191 -0.88928 -0.0291121
+-0.0447385 0.454238 -0.885472 -0.0872115
+-0.0743513 0.450339 -0.877872 -0.144937
+-0.103646 0.444512 -0.866513 -0.202043
+-0.132496 0.436781 -0.851444 -0.258283
+-0.160779 0.427181 -0.832728 -0.313417
+-0.188374 0.415751 -0.810447 -0.367209
+-0.215162 0.40254 -0.784695 -0.419428
+-0.241029 0.387606 -0.755583 -0.469852
+-0.265864 0.371012 -0.723236 -0.518263
+-0.28956 0.352829 -0.687791 -0.564456
+-0.312016 0.333136 -0.649401 -0.608231
+-0.333136 0.312016 -0.608231 -0.649401
+-0.352829 0.28956 -0.564456 -0.687791
+-0.371012 0.265864 -0.518263 -0.723236
+-0.387606 0.241029 -0.469852 -0.755583
+-0.40254 0.215162 -0.419428 -0.784695
+-0.415751 0.188374 -0.367209 -0.810447
+-0.427181 0.160779 -0.313417 -0.832728
+-0.436782 0.132496 -0.258283 -0.851444
+-0.444512 0.103646 -0.202043 -0.866513
+-0.450339 0.0743513 -0.144937 -0.877872
+-0.454238 0.0447385 -0.0872113 -0.885472
+-0.456191 0.0149342 -0.0291121 -0.88928
+0.499732 0.0163595 -0.113039 0.858616
+0.497592 0.0490086 -0.168953 0.849385
+0.493322 0.0814477 -0.224144 0.836516
+0.486938 0.113538 -0.278375 0.820066
+0.47847 0.145142 -0.331414 0.800103
+0.467953 0.176125 -0.383033 0.776715
+0.455432 0.206354 -0.433013 0.75
+0.440961 0.235698 -0.481138 0.720074
+0.424601 0.264034 -0.527203 0.687064
+0.406423 0.291239 -0.57101 0.651112
+0.386505 0.317197 -0.612372 0.612372
+0.364932 0.341796 -0.651112 0.57101
+0.341796 0.364932 -0.687064 0.527203
+0.317197 0.386505 -0.720074 0.481138
+0.291239 0.406423 -0.75 0.433013
+0.264034 0.424601 -0.776715 0.383033
+0.235698 0.440961 -0.800103 0.331414
+0.206353 0.455432 -0.820066 0.278375
+0.176125 0.467953 -0.836516 0.224144
+0.145142 0.47847 -0.849385 0.168953
+0.113538 0.486938 -0.858616 0.113039
+0.0814477 0.493322 -0.864171 0.0566407
+0.0490085 0.497592 -0.866025 -1.06466e-07
+0.0163595 0.499732 -0.864171 -0.0566409
+-0.0163596 0.499732 -0.858616 -0.113039
+-0.0490086 0.497592 -0.849385 -0.168953
+-0.0814478 0.493322 -0.836516 -0.224144
+-0.113538 0.486938 -0.820066 -0.278375
+-0.145142 0.47847 -0.800103 -0.331414
+-0.176125 0.467953 -0.776715 -0.383033
+-0.206354 0.455432 -0.75 -0.433013
+-0.235698 0.440961 -0.720074 -0.481138
+-0.264034 0.424601 -0.687064 -0.527203
+-0.291239 0.406423 -0.651112 -0.57101
+-0.317197 0.386505 -0.612372 -0.612372
+-0.341796 0.364932 -0.57101 -0.651112
+-0.364932 0.341796 -0.527203 -0.687064
+-0.386505 0.317197 -0.481138 -0.720074
+-0.406423 0.291239 -0.433013 -0.75
+-0.424601 0.264034 -0.383033 -0.776715
+-0.440961 0.235698 -0.331414 -0.800103
+-0.455432 0.206354 -0.278375 -0.820066
+-0.467953 0.176125 -0.224144 -0.836516
+-0.47847 0.145142 -0.168953 -0.849385
+-0.486938 0.113538 -0.113039 -0.858616
+-0.493322 0.0814478 -0.0566408 -0.864171
+-0.497592 0.0490085 4.10824e-08 -0.866025
+-0.499732 0.0163596 0.0566407 -0.864171
+0.499732 0.0163595 0.0566408 0.864171
+0.497592 0.0490086 -2.36595e-09 0.866025
+0.493322 0.0814477 -0.0566408 0.864171
+0.486938 0.113538 -0.113039 0.858616
+0.47847 0.145142 -0.168953 0.849385
+0.467953 0.176125 -0.224144 0.836516
+0.455432 0.206354 -0.278375 0.820066
+0.440961 0.235698 -0.331414 0.800103
+0.424601 0.264034 -0.383033 0.776715
+0.406423 0.291239 -0.433013 0.75
+0.386505 0.317197 -0.481138 0.720074
+0.364932 0.341796 -0.527203 0.687064
+0.341796 0.364932 -0.57101 0.651112
+0.317197 0.386505 -0.612372 0.612372
+0.291239 0.406423 -0.651112 0.57101
+0.264034 0.424601 -0.687064 0.527203
+0.235698 0.440961 -0.720074 0.481138
+0.206353 0.455432 -0.75 0.433013
+0.176125 0.467953 -0.776715 0.383033
+0.145142 0.47847 -0.800103 0.331414
+0.113538 0.486938 -0.820066 0.278375
+0.0814477 0.493322 -0.836516 0.224144
+0.0490085 0.497592 -0.849385 0.168953
+0.0163595 0.499732 -0.858616 0.113039
+-0.0163596 0.499732 -0.864171 0.0566407
+-0.0490086 0.497592 -0.866025 -7.24831e-08
+-0.0814478 0.493322 -0.864171 -0.0566408
+-0.113538 0.486938 -0.858616 -0.113039
+-0.145142 0.47847 -0.849385 -0.168953
+-0.176125 0.467953 -0.836516 -0.224144
+-0.206354 0.455432 -0.820066 -0.278375
+-0.235698 0.440961 -0.800103 -0.331413
+-0.264034 0.424601 -0.776715 -0.383033
+-0.291239 0.406423 -0.75 -0.433013
+-0.317197 0.386505 -0.720074 -0.481138
+-0.341796 0.364932 -0.687064 -0.527203
+-0.364932 0.341796 -0.651112 -0.57101
+-0.386505 0.317197 -0.612372 -0.612372
+-0.406423 0.291239 -0.57101 -0.651112
+-0.424601 0.264034 -0.527203 -0.687064
+-0.440961 0.235698 -0.481138 -0.720074
+-0.455432 0.206354 -0.433013 -0.75
+-0.467953 0.176125 -0.383033 -0.776715
+-0.47847 0.145142 -0.331414 -0.800103
+-0.486938 0.113538 -0.278375 -0.820066
+-0.493322 0.0814478 -0.224144 -0.836516
+-0.497592 0.0490085 -0.168953 -0.849385
+-0.499732 0.0163596 -0.113039 -0.858616
+0.539773 0.0176703 -0.0275372 0.841175
+0.537461 0.0529353 -0.0824937 0.837573
+0.532848 0.0879736 -0.137097 0.830384
+0.525954 0.122635 -0.191113 0.81964
+0.516807 0.156772 -0.244311 0.805385
+0.505447 0.190237 -0.296463 0.787682
+0.491923 0.222887 -0.347345 0.766606
+0.476292 0.254583 -0.396739 0.742247
+0.458622 0.285189 -0.444435 0.71471
+0.438987 0.314574 -0.490228 0.684112
+0.417473 0.342612 -0.533921 0.650585
+0.394172 0.369182 -0.575329 0.614272
+0.369182 0.394172 -0.614272 0.575329
+0.342612 0.417473 -0.650585 0.533922
+0.314574 0.438987 -0.684112 0.490228
+0.285189 0.458622 -0.71471 0.444435
+0.254583 0.476292 -0.742247 0.396739
+0.222887 0.491923 -0.766606 0.347345
+0.190237 0.505447 -0.787682 0.296463
+0.156772 0.516807 -0.805385 0.244311
+0.122635 0.525954 -0.81964 0.191113
+0.0879735 0.532848 -0.830384 0.137097
+0.0529352 0.537461 -0.837573 0.0824936
+0.0176703 0.539773 -0.841175 0.0275371
+-0.0176704 0.539773 -0.841175 -0.0275373
+-0.0529354 0.537461 -0.837573 -0.0824938
+-0.0879736 0.532848 -0.830384 -0.137097
+-0.122635 0.525954 -0.81964 -0.191113
+-0.156772 0.516807 -0.805385 -0.244311
+-0.190237 0.505447 -0.787682 -0.296463
+-0.222887 0.491923 -0.766606 -0.347345
+-0.254583 0.476292 -0.742247 -0.396739
+-0.285189 0.458622 -0.71471 -0.444435
+-0.314574 0.438987 -0.684112 -0.490228
+-0.342612 0.417473 -0.650585 -0.533921
+-0.369182 0.394172 -0.614272 -0.575329
+-0.394172 0.369182 -0.575329 -0.614272
+-0.417473 0.342612 -0.533921 -0.650585
+-0.438987 0.314574 -0.490228 -0.684112
+-0.458622 0.285189 -0.444435 -0.71471
+-0.476292 0.254583 -0.39674 -0.742247
+-0.491923 0.222887 -0.347345 -0.766606
+-0.505447 0.190237 -0.296463 -0.787682
+-0.516807 0.156772 -0.244311 -0.805385
+-0.525954 0.122635 -0.191113 -0.81964
+-0.532848 0.0879736 -0.137097 -0.830384
+-0.537461 0.0529353 -0.0824937 -0.837573
+-0.539773 0.0176704 -0.0275373 -0.841175
+0.539773 0.0176703 -0.191113 0.81964
+0.537461 0.0529353 -0.244311 0.805385
+0.532848 0.0879736 -0.296463 0.787682
+0.525954 0.122635 -0.347345 0.766606
+0.516807 0.156772 -0.396739 0.742247
+0.505447 0.190237 -0.444435 0.71471
+0.491923 0.222887 -0.490228 0.684112
+0.476292 0.254583 -0.533922 0.650585
+0.458622 0.285189 -0.575329 0.614272
+0.438987 0.314574 -0.614272 0.575329
+0.417473 0.342612 -0.650585 0.533922
+0.394172 0.369182 -0.684112 0.490228
+0.369182 0.394172 -0.71471 0.444435
+0.342612 0.417473 -0.742247 0.396739
+0.314574 0.438987 -0.766606 0.347345
+0.285189 0.458622 -0.787682 0.296463
+0.254583 0.476292 -0.805385 0.244311
+0.222887 0.491923 -0.81964 0.191113
+0.190237 0.505447 -0.830384 0.137097
+0.156772 0.516807 -0.837573 0.0824937
+0.122635 0.525954 -0.841175 0.0275372
+0.0879735 0.532848 -0.841175 -0.0275373
+0.0529352 0.537461 -0.837573 -0.0824938
+0.0176703 0.539773 -0.830384 -0.137097
+-0.0176704 0.539773 -0.81964 -0.191113
+-0.0529354 0.537461 -0.805385 -0.244311
+-0.0879736 0.532848 -0.787682 -0.296463
+-0.122635 0.525954 -0.766606 -0.347345
+-0.156772 0.516807 -0.742247 -0.39674
+-0.190237 0.505447 -0.71471 -0.444435
+-0.222887 0.491923 -0.684112 -0.490228
+-0.254583 0.476292 -0.650585 -0.533921
+-0.285189 0.458622 -0.614272 -0.575329
+-0.314574 0.438987 -0.575329 -0.614272
+-0.342612 0.417473 -0.533922 -0.650585
+-0.369182 0.394172 -0.490228 -0.684112
+-0.394172 0.369182 -0.444435 -0.71471
+-0.417473 0.342612 -0.396739 -0.742247
+-0.438987 0.314574 -0.347345 -0.766606
+-0.458622 0.285189 -0.296463 -0.787682
+-0.476292 0.254583 -0.244311 -0.805385
+-0.491923 0.222887 -0.191113 -0.81964
+-0.505447 0.190237 -0.137097 -0.830384
+-0.516807 0.156772 -0.0824937 -0.837573
+-0.525954 0.122635 -0.0275371 -0.841175
+-0.532848 0.0879736 0.0275372 -0.841175
+-0.537461 0.0529353 0.0824938 -0.837573
+-0.539773 0.0176704 0.137097 -0.830384
+0.577041 0.0188904 -0.262454 0.773165
+0.57457 0.0565902 -0.31246 0.754344
+0.569639 0.0940477 -0.361127 0.732294
+0.562268 0.131103 -0.408248 0.707107
+0.55249 0.167596 -0.453621 0.678892
+0.540346 0.203372 -0.497052 0.64777
+0.525887 0.238277 -0.538354 0.613875
+0.509177 0.272161 -0.57735 0.57735
+0.490287 0.30488 -0.613875 0.538354
+0.469297 0.336294 -0.64777 0.497052
+0.446298 0.366267 -0.678892 0.453621
+0.421387 0.394672 -0.707107 0.408248
+0.394672 0.421387 -0.732294 0.361127
+0.366267 0.446298 -0.754344 0.31246
+0.336294 0.469297 -0.773165 0.262454
+0.30488 0.490287 -0.788675 0.211325
+0.272161 0.509178 -0.800808 0.159291
+0.238276 0.525887 -0.809511 0.106574
+0.203372 0.540346 -0.814748 0.0534014
+0.167596 0.55249 -0.816497 -3.50817e-08
+0.131103 0.562268 -0.814748 -0.0534015
+0.0940477 0.569639 -0.809511 -0.106574
+0.0565902 0.57457 -0.800808 -0.159291
+0.0188903 0.577041 -0.788675 -0.211325
+-0.0188904 0.577041 -0.773165 -0.262454
+-0.0565903 0.57457 -0.754344 -0.31246
+-0.0940478 0.569639 -0.732294 -0.361127
+-0.131103 0.562268 -0.707107 -0.408248
+-0.167596 0.55249 -0.678892 -0.453621
+-0.203372 0.540346 -0.64777 -0.497052
+-0.238277 0.525887 -0.613875 -0.538354
+-0.272161 0.509178 -0.57735 -0.57735
+-0.30488 0.490287 -0.538354 -0.613875
+-0.336294 0.469297 -0.497052 -0.64777
+-0.366267 0.446298 -0.453621 -0.678892
+-0.394672 0.421387 -0.408248 -0.707107
+-0.421387 0.394672 -0.361127 -0.732294
+-0.446298 0.366267 -0.31246 -0.754344
+-0.469297 0.336294 -0.262454 -0.773165
+-0.490287 0.30488 -0.211325 -0.788675
+-0.509177 0.272161 -0.159291 -0.800808
+-0.525887 0.238277 -0.106574 -0.809511
+-0.540346 0.203372 -0.0534014 -0.814748
+-0.55249 0.167596 -2.65621e-08 -0.816497
+-0.562268 0.131103 0.0534015 -0.814748
+-0.569639 0.0940478 0.106574 -0.809511
+-0.57457 0.0565902 0.159291 -0.800808
+-0.577041 0.0188904 0.211325 -0.788675
+0.577041 0.0188904 -0.106574 0.809511
+0.57457 0.0565902 -0.159291 0.800808
+0.569639 0.0940477 -0.211325 0.788675
+0.562268 0.131103 -0.262454 0.773165
+0.55249 0.167596 -0.31246 0.754344
+0.540346 0.203372 -0.361127 0.732294
+0.525887 0.238277 -0.408248 0.707107
+0.509177 0.272161 -0.453621 0.678892
+0.490287 0.30488 -0.497052 0.64777
+0.469297 0.336294 -0.538354 0.613875
+0.446298 0.366267 -0.57735 0.57735
+0.421387 0.394672 -0.613875 0.538354
+0.394672 0.421387 -0.64777 0.497052
+0.366267 0.446298 -0.678892 0.453621
+0.336294 0.469297 -0.707107 0.408248
+0.30488 0.490287 -0.732294 0.361127
+0.272161 0.509178 -0.754345 0.31246
+0.238276 0.525887 -0.773165 0.262454
+0.203372 0.540346 -0.788675 0.211325
+0.167596 0.55249 -0.800808 0.159291
+0.131103 0.562268 -0.809511 0.106574
+0.0940477 0.569639 -0.814748 0.0534013
+0.0565902 0.57457 -0.816497 -1.00377e-07
+0.0188903 0.577041 -0.814748 -0.0534015
+-0.0188904 0.577041 -0.809511 -0.106574
+-0.0565903 0.57457 -0.800808 -0.159291
+-0.0940478 0.569639 -0.788675 -0.211325
+-0.131103 0.562268 -0.773165 -0.262454
+-0.167596 0.55249 -0.754344 -0.31246
+-0.203372 0.540346 -0.732293 -0.361127
+-0.238277 0.525887 -0.707107 -0.408248
+-0.272161 0.509178 -0.678892 -0.453621
+-0.30488 0.490287 -0.64777 -0.497052
+-0.336294 0.469297 -0.613875 -0.538354
+-0.366267 0.446298 -0.57735 -0.57735
+-0.394672 0.421387 -0.538354 -0.613875
+-0.421387 0.394672 -0.497052 -0.64777
+-0.446298 0.366267 -0.453621 -0.678892
+-0.469297 0.336294 -0.408248 -0.707107
+-0.490287 0.30488 -0.361127 -0.732294
+-0.509177 0.272161 -0.31246 -0.754344
+-0.525887 0.238277 -0.262454 -0.773165
+-0.540346 0.203372 -0.211325 -0.788675
+-0.55249 0.167596 -0.159291 -0.800808
+-0.562268 0.131103 -0.106574 -0.809511
+-0.569639 0.0940478 -0.0534015 -0.814748
+-0.57457 0.0565902 3.87329e-08 -0.816497
+-0.577041 0.0188904 0.0534014 -0.814748
+0.612045 0.0200363 -0.17952 0.769917
+0.609424 0.060023 -0.22949 0.756528
+0.604193 0.0997527 -0.278478 0.739899
+0.596375 0.139055 -0.326274 0.720101
+0.586004 0.177762 -0.372672 0.69722
+0.573123 0.215708 -0.417474 0.671353
+0.557788 0.25273 -0.460489 0.642612
+0.540064 0.28867 -0.501532 0.611118
+0.520028 0.323374 -0.540427 0.577008
+0.497765 0.356693 -0.577008 0.540427
+0.47337 0.388485 -0.611118 0.501532
+0.446949 0.418613 -0.642612 0.460489
+0.418613 0.446949 -0.671353 0.417474
+0.388485 0.47337 -0.69722 0.372672
+0.356693 0.497765 -0.720101 0.326274
+0.323374 0.520028 -0.739899 0.278478
+0.28867 0.540064 -0.756528 0.22949
+0.25273 0.557788 -0.769917 0.179519
+0.215708 0.573123 -0.78001 0.12878
+0.177762 0.586004 -0.786763 0.0774893
+0.139055 0.596375 -0.790146 0.0258667
+0.0997526 0.604193 -0.790146 -0.0258668
+0.0600229 0.609424 -0.786763 -0.0774895
+0.0200362 0.612045 -0.78001 -0.12878
+-0.0200363 0.612045 -0.769917 -0.17952
+-0.060023 0.609424 -0.756528 -0.22949
+-0.0997527 0.604193 -0.739899 -0.278478
+-0.139055 0.596375 -0.720101 -0.326274
+-0.177762 0.586004 -0.69722 -0.372672
+-0.215708 0.573123 -0.671353 -0.417474
+-0.25273 0.557788 -0.642612 -0.460489
+-0.28867 0.540064 -0.611118 -0.501532
+-0.323374 0.520028 -0.577008 -0.540427
+-0.356693 0.497765 -0.540427 -0.577008
+-0.388485 0.47337 -0.501532 -0.611118
+-0.418613 0.446949 -0.460489 -0.642612
+-0.446949 0.418613 -0.417474 -0.671353
+-0.47337 0.388485 -0.372672 -0.69722
+-0.497765 0.356693 -0.326274 -0.720101
+-0.520028 0.323374 -0.278478 -0.739899
+-0.540064 0.28867 -0.22949 -0.756528
+-0.557788 0.25273 -0.17952 -0.769917
+-0.573123 0.215708 -0.12878 -0.78001
+-0.586004 0.177762 -0.0774894 -0.786763
+-0.596375 0.139055 -0.0258666 -0.790146
+-0.604193 0.0997527 0.0258667 -0.790146
+-0.609424 0.060023 0.0774894 -0.786763
+-0.612045 0.0200363 0.12878 -0.78001
+0.539773 0.0176703 0.137097 0.830384
+0.537461 0.0529353 0.0824937 0.837573
+0.532848 0.0879736 0.0275372 0.841175
+0.525954 0.122635 -0.0275372 0.841175
+0.516807 0.156772 -0.0824937 0.837573
+0.505447 0.190237 -0.137097 0.830384
+0.491923 0.222887 -0.191113 0.81964
+0.476292 0.254583 -0.244311 0.805385
+0.458622 0.285189 -0.296463 0.787682
+0.438987 0.314574 -0.347345 0.766606
+0.417473 0.342612 -0.396739 0.742247
+0.394172 0.369182 -0.444435 0.71471
+0.369182 0.394172 -0.490228 0.684112
+0.342612 0.417473 -0.533922 0.650585
+0.314574 0.438987 -0.575329 0.614272
+0.285189 0.458622 -0.614272 0.575329
+0.254583 0.476292 -0.650585 0.533921
+0.222887 0.491923 -0.684112 0.490228
+0.190237 0.505447 -0.71471 0.444435
+0.156772 0.516807 -0.742247 0.396739
+0.122635 0.525954 -0.766606 0.347345
+0.0879735 0.532848 -0.787682 0.296462
+0.0529352 0.537461 -0.805385 0.244311
+0.0176703 0.539773 -0.81964 0.191113
+-0.0176704 0.539773 -0.830384 0.137097
+-0.0529354 0.537461 -0.837573 0.0824936
+-0.0879736 0.532848 -0.841175 0.0275372
+-0.122635 0.525954 -0.841175 -0.0275373
+-0.156772 0.516807 -0.837573 -0.0824939
+-0.190237 0.505447 -0.830384 -0.137097
+-0.222887 0.491923 -0.81964 -0.191113
+-0.254583 0.476292 -0.805385 -0.244311
+-0.285189 0.458622 -0.787682 -0.296463
+-0.314574 0.438987 -0.766606 -0.347345
+-0.342612 0.417473 -0.742247 -0.396739
+-0.369182 0.394172 -0.71471 -0.444435
+-0.394172 0.369182 -0.684112 -0.490228
+-0.417473 0.342612 -0.650585 -0.533922
+-0.438987 0.314574 -0.614272 -0.575329
+-0.458622 0.285189 -0.575329 -0.614272
+-0.476292 0.254583 -0.533922 -0.650585
+-0.491923 0.222887 -0.490228 -0.684112
+-0.505447 0.190237 -0.444435 -0.71471
+-0.516807 0.156772 -0.396739 -0.742247
+-0.525954 0.122635 -0.347345 -0.766606
+-0.532848 0.0879736 -0.296463 -0.787682
+-0.537461 0.0529353 -0.244311 -0.805385
+-0.539773 0.0176704 -0.191113 -0.81964
+0.577041 0.0188904 0.0534014 0.814748
+0.57457 0.0565902 -2.23064e-09 0.816497
+0.569639 0.0940477 -0.0534014 0.814748
+0.562268 0.131103 -0.106574 0.809511
+0.55249 0.167596 -0.159291 0.800808
+0.540346 0.203372 -0.211325 0.788675
+0.525887 0.238277 -0.262454 0.773165
+0.509177 0.272161 -0.31246 0.754344
+0.490287 0.30488 -0.361127 0.732294
+0.469297 0.336294 -0.408248 0.707107
+0.446298 0.366267 -0.453621 0.678892
+0.421387 0.394672 -0.497052 0.64777
+0.394672 0.421387 -0.538354 0.613875
+0.366267 0.446298 -0.57735 0.57735
+0.336294 0.469297 -0.613875 0.538354
+0.30488 0.490287 -0.64777 0.497052
+0.272161 0.509178 -0.678892 0.453621
+0.238276 0.525887 -0.707107 0.408248
+0.203372 0.540346 -0.732294 0.361127
+0.167596 0.55249 -0.754344 0.31246
+0.131103 0.562268 -0.773165 0.262454
+0.0940477 0.569639 -0.788675 0.211325
+0.0565902 0.57457 -0.800808 0.15929
+0.0188903 0.577041 -0.809511 0.106574
+-0.0188904 0.577041 -0.814748 0.0534014
+-0.0565903 0.57457 -0.816497 -6.83377e-08
+-0.0940478 0.569639 -0.814748 -0.0534015
+-0.131103 0.562268 -0.809511 -0.106574
+-0.167596 0.55249 -0.800808 -0.159291
+-0.203372 0.540346 -0.788675 -0.211325
+-0.238277 0.525887 -0.773165 -0.262454
+-0.272161 0.509178 -0.754345 -0.31246
+-0.30488 0.490287 -0.732294 -0.361127
+-0.336294 0.469297 -0.707107 -0.408248
+-0.366267 0.446298 -0.678892 -0.453621
+-0.394672 0.421387 -0.64777 -0.497052
+-0.421387 0.394672 -0.613875 -0.538354
+-0.446298 0.366267 -0.57735 -0.57735
+-0.469297 0.336294 -0.538354 -0.613875
+-0.490287 0.30488 -0.497052 -0.64777
+-0.509177 0.272161 -0.453621 -0.678892
+-0.525887 0.238277 -0.408248 -0.707107
+-0.540346 0.203372 -0.361127 -0.732294
+-0.55249 0.167596 -0.31246 -0.754344
+-0.562268 0.131103 -0.262454 -0.773165
+-0.569639 0.0940478 -0.211325 -0.788675
+-0.57457 0.0565902 -0.159291 -0.800808
+-0.577041 0.0188904 -0.106574 -0.809511
+0.577041 0.0188904 0.211325 0.788675
+0.57457 0.0565902 0.159291 0.800808
+0.569639 0.0940477 0.106574 0.809511
+0.562268 0.131103 0.0534014 0.814748
+0.55249 0.167596 -6.08539e-10 0.816497
+0.540346 0.203372 -0.0534014 0.814748
+0.525887 0.238277 -0.106574 0.809511
+0.509177 0.272161 -0.159291 0.800808
+0.490287 0.30488 -0.211325 0.788675
+0.469297 0.336294 -0.262454 0.773165
+0.446298 0.366267 -0.31246 0.754344
+0.421387 0.394672 -0.361127 0.732294
+0.394672 0.421387 -0.408248 0.707107
+0.366267 0.446298 -0.453621 0.678892
+0.336294 0.469297 -0.497052 0.64777
+0.30488 0.490287 -0.538354 0.613875
+0.272161 0.509178 -0.57735 0.57735
+0.238276 0.525887 -0.613875 0.538354
+0.203372 0.540346 -0.64777 0.497052
+0.167596 0.55249 -0.678892 0.453621
+0.131103 0.562268 -0.707107 0.408248
+0.0940477 0.569639 -0.732294 0.361127
+0.0565902 0.57457 -0.754345 0.31246
+0.0188903 0.577041 -0.773165 0.262454
+-0.0188904 0.577041 -0.788675 0.211325
+-0.0565903 0.57457 -0.800808 0.159291
+-0.0940478 0.569639 -0.809511 0.106574
+-0.131103 0.562268 -0.814748 0.0534014
+-0.167596 0.55249 -0.816497 -1.33633e-07
+-0.203372 0.540346 -0.814748 -0.0534016
+-0.238277 0.525887 -0.809511 -0.106574
+-0.272161 0.509178 -0.800808 -0.15929
+-0.30488 0.490287 -0.788675 -0.211325
+-0.336294 0.469297 -0.773165 -0.262454
+-0.366267 0.446298 -0.754344 -0.31246
+-0.394672 0.421387 -0.732294 -0.361127
+-0.421387 0.394672 -0.707107 -0.408248
+-0.446298 0.366267 -0.678892 -0.453621
+-0.469297 0.336294 -0.64777 -0.497052
+-0.490287 0.30488 -0.613875 -0.538354
+-0.509177 0.272161 -0.57735 -0.57735
+-0.525887 0.238277 -0.538354 -0.613875
+-0.540346 0.203372 -0.497052 -0.64777
+-0.55249 0.167596 -0.453621 -0.678892
+-0.562268 0.131103 -0.408248 -0.707107
+-0.569639 0.0940478 -0.361127 -0.732294
+-0.57457 0.0565902 -0.31246 -0.754344
+-0.577041 0.0188904 -0.262454 -0.773165
+0.612045 0.0200363 0.12878 0.78001
+0.609424 0.060023 0.0774894 0.786763
+0.604193 0.0997527 0.0258667 0.790146
+0.596375 0.139055 -0.0258667 0.790146
+0.586004 0.177762 -0.0774894 0.786763
+0.573123 0.215708 -0.12878 0.78001
+0.557788 0.25273 -0.17952 0.769917
+0.540064 0.28867 -0.22949 0.756528
+0.520028 0.323374 -0.278478 0.739899
+0.497765 0.356693 -0.326274 0.720101
+0.47337 0.388485 -0.372672 0.69722
+0.446949 0.418613 -0.417474 0.671353
+0.418613 0.446949 -0.460489 0.642612
+0.388485 0.47337 -0.501532 0.611118
+0.356693 0.497765 -0.540427 0.577008
+0.323374 0.520028 -0.577008 0.540427
+0.28867 0.540064 -0.611118 0.501532
+0.25273 0.557788 -0.642612 0.460489
+0.215708 0.573123 -0.671353 0.417474
+0.177762 0.586004 -0.69722 0.372672
+0.139055 0.596375 -0.720101 0.326274
+0.0997526 0.604193 -0.739899 0.278478
+0.0600229 0.609424 -0.756528 0.22949
+0.0200362 0.612045 -0.769917 0.179519
+-0.0200363 0.612045 -0.78001 0.12878
+-0.060023 0.609424 -0.786763 0.0774893
+-0.0997527 0.604193 -0.790146 0.0258667
+-0.139055 0.596375 -0.790146 -0.0258668
+-0.177762 0.586004 -0.786763 -0.0774895
+-0.215708 0.573123 -0.78001 -0.12878
+-0.25273 0.557788 -0.769917 -0.17952
+-0.28867 0.540064 -0.756528 -0.22949
+-0.323374 0.520028 -0.739899 -0.278478
+-0.356693 0.497765 -0.720101 -0.326274
+-0.388485 0.47337 -0.69722 -0.372672
+-0.418613 0.446949 -0.671353 -0.417474
+-0.446949 0.418613 -0.642612 -0.460489
+-0.47337 0.388485 -0.611118 -0.501532
+-0.497765 0.356693 -0.577008 -0.540427
+-0.520028 0.323374 -0.540427 -0.577008
+-0.540064 0.28867 -0.501532 -0.611118
+-0.557788 0.25273 -0.460489 -0.642612
+-0.573123 0.215708 -0.417474 -0.671353
+-0.586004 0.177762 -0.372672 -0.69722
+-0.596375 0.139055 -0.326274 -0.720101
+-0.604193 0.0997527 -0.278478 -0.739899
+-0.609424 0.060023 -0.22949 -0.756528
+-0.612045 0.0200363 -0.17952 -0.769917
+0.612045 0.0200363 -0.0258667 0.790146
+0.609424 0.060023 -0.0774894 0.786763
+0.604193 0.0997527 -0.12878 0.78001
+0.596375 0.139055 -0.17952 0.769917
+0.586004 0.177762 -0.22949 0.756528
+0.573123 0.215708 -0.278478 0.739899
+0.557788 0.25273 -0.326274 0.720101
+0.540064 0.28867 -0.372672 0.69722
+0.520028 0.323374 -0.417474 0.671353
+0.497765 0.356693 -0.460489 0.642612
+0.47337 0.388485 -0.501532 0.611118
+0.446949 0.418613 -0.540427 0.577008
+0.418613 0.446949 -0.577008 0.540427
+0.388485 0.47337 -0.611118 0.501532
+0.356693 0.497765 -0.642612 0.460489
+0.323374 0.520028 -0.671353 0.417474
+0.28867 0.540064 -0.69722 0.372672
+0.25273 0.557788 -0.720101 0.326274
+0.215708 0.573123 -0.739899 0.278478
+0.177762 0.586004 -0.756528 0.22949
+0.139055 0.596375 -0.769917 0.17952
+0.0997526 0.604193 -0.78001 0.12878
+0.0600229 0.609424 -0.786763 0.0774893
+0.0200362 0.612045 -0.790146 0.0258666
+-0.0200363 0.612045 -0.790146 -0.0258668
+-0.060023 0.609424 -0.786763 -0.0774894
+-0.0997527 0.604193 -0.78001 -0.12878
+-0.139055 0.596375 -0.769917 -0.17952
+-0.177762 0.586004 -0.756528 -0.22949
+-0.215708 0.573123 -0.739899 -0.278478
+-0.25273 0.557788 -0.720101 -0.326274
+-0.28867 0.540064 -0.69722 -0.372672
+-0.323374 0.520028 -0.671353 -0.417474
+-0.356693 0.497765 -0.642612 -0.460489
+-0.388485 0.47337 -0.611118 -0.501532
+-0.418613 0.446949 -0.577008 -0.540427
+-0.446949 0.418613 -0.540427 -0.577008
+-0.47337 0.388485 -0.501532 -0.611118
+-0.497765 0.356693 -0.460489 -0.642612
+-0.520028 0.323374 -0.417474 -0.671353
+-0.540064 0.28867 -0.372672 -0.69722
+-0.557788 0.25273 -0.326274 -0.720101
+-0.573123 0.215708 -0.278478 -0.739899
+-0.586004 0.177762 -0.22949 -0.756528
+-0.596375 0.139055 -0.179519 -0.769917
+-0.604193 0.0997527 -0.12878 -0.78001
+-0.609424 0.060023 -0.0774893 -0.786763
+-0.612045 0.0200363 -0.0258668 -0.790146
+0.645152 0.0211201 -0.099691 0.757229
+0.642389 0.0632698 -0.149003 0.749087
+0.636876 0.105149 -0.197676 0.737738
+0.628635 0.146577 -0.245503 0.72323
+0.617702 0.187378 -0.292279 0.705625
+0.604125 0.227376 -0.337804 0.684998
+0.58796 0.266401 -0.381881 0.661438
+0.569278 0.304285 -0.424324 0.635045
+0.548158 0.340866 -0.464949 0.605934
+0.52469 0.375988 -0.503584 0.574227
+0.498976 0.409499 -0.540062 0.540062
+0.471125 0.441257 -0.574227 0.503584
+0.441257 0.471125 -0.605934 0.464949
+0.409499 0.498976 -0.635045 0.424324
+0.375988 0.52469 -0.661438 0.381881
+0.340866 0.548158 -0.684998 0.337804
+0.304285 0.569278 -0.705625 0.292279
+0.266401 0.58796 -0.72323 0.245503
+0.227376 0.604125 -0.737738 0.197676
+0.187378 0.617702 -0.749087 0.149003
+0.146577 0.628635 -0.757229 0.099691
+0.105148 0.636876 -0.762127 0.0499524
+0.0632697 0.642389 -0.763763 -9.38938e-08
+0.02112 0.645152 -0.762127 -0.0499525
+-0.0211201 0.645152 -0.757229 -0.0996911
+-0.0632698 0.642389 -0.749087 -0.149003
+-0.105149 0.636876 -0.737738 -0.197676
+-0.146577 0.628635 -0.72323 -0.245503
+-0.187378 0.617702 -0.705625 -0.292279
+-0.227377 0.604125 -0.684998 -0.337804
+-0.266401 0.58796 -0.661438 -0.381881
+-0.304285 0.569278 -0.635045 -0.424324
+-0.340866 0.548158 -0.605934 -0.464949
+-0.375988 0.52469 -0.574227 -0.503584
+-0.409499 0.498976 -0.540062 -0.540062
+-0.441257 0.471125 -0.503584 -0.574227
+-0.471125 0.441257 -0.464949 -0.605934
+-0.498976 0.409499 -0.424324 -0.635045
+-0.52469 0.375988 -0.381881 -0.661438
+-0.548158 0.340866 -0.337804 -0.684998
+-0.569278 0.304285 -0.292279 -0.705625
+-0.58796 0.266401 -0.245503 -0.72323
+-0.604125 0.227376 -0.197676 -0.737738
+-0.617702 0.187378 -0.149003 -0.749087
+-0.628635 0.146577 -0.099691 -0.757229
+-0.636876 0.105149 -0.0499525 -0.762127
+-0.642389 0.0632698 3.62313e-08 -0.763763
+-0.645152 0.0211201 0.0499524 -0.762127
+0.645152 0.0211201 0.0499525 0.762127
+0.642389 0.0632698 -2.08657e-09 0.763763
+0.636876 0.105149 -0.0499525 0.762127
+0.628635 0.146577 -0.099691 0.757229
+0.617702 0.187378 -0.149003 0.749087
+0.604125 0.227376 -0.197676 0.737738
+0.58796 0.266401 -0.245503 0.72323
+0.569278 0.304285 -0.292279 0.705625
+0.548158 0.340866 -0.337804 0.684998
+0.52469 0.375988 -0.381881 0.661438
+0.498976 0.409499 -0.424324 0.635045
+0.471125 0.441257 -0.464949 0.605934
+0.441257 0.471125 -0.503584 0.574227
+0.409499 0.498976 -0.540062 0.540062
+0.375988 0.52469 -0.574227 0.503584
+0.340866 0.548158 -0.605934 0.464949
+0.304285 0.569278 -0.635045 0.424324
+0.266401 0.58796 -0.661438 0.381881
+0.227376 0.604125 -0.684998 0.337804
+0.187378 0.617702 -0.705625 0.292279
+0.146577 0.628635 -0.72323 0.245503
+0.105148 0.636876 -0.737738 0.197676
+0.0632697 0.642389 -0.749087 0.149003
+0.02112 0.645152 -0.757229 0.0996909
+-0.0211201 0.645152 -0.762127 0.0499524
+-0.0632698 0.642389 -0.763763 -6.39241e-08
+-0.105149 0.636876 -0.762127 -0.0499525
+-0.146577 0.628635 -0.757229 -0.0996911
+-0.187378 0.617702 -0.749087 -0.149003
+-0.227377 0.604125 -0.737738 -0.197676
+-0.266401 0.58796 -0.72323 -0.245504
+-0.304285 0.569278 -0.705625 -0.292279
+-0.340866 0.548158 -0.684998 -0.337804
+-0.375988 0.52469 -0.661438 -0.381881
+-0.409499 0.498976 -0.635045 -0.424324
+-0.441257 0.471125 -0.605934 -0.464949
+-0.471125 0.441257 -0.574227 -0.503584
+-0.498976 0.409499 -0.540062 -0.540062
+-0.52469 0.375988 -0.503584 -0.574227
+-0.548158 0.340866 -0.464949 -0.605934
+-0.569278 0.304285 -0.424324 -0.635045
+-0.58796 0.266401 -0.381881 -0.661438
+-0.604125 0.227376 -0.337803 -0.684998
+-0.617702 0.187378 -0.292279 -0.705625
+-0.628635 0.146577 -0.245503 -0.72323
+-0.636876 0.105149 -0.197676 -0.737738
+-0.642389 0.0632698 -0.149003 -0.749087
+-0.645152 0.0211201 -0.0996911 -0.757229
+0.676641 0.0221509 -0.0240806 0.735586
+0.673743 0.0663579 -0.0721387 0.732436
+0.667961 0.110281 -0.119888 0.72615
+0.659318 0.153731 -0.167124 0.716754
+0.647852 0.196524 -0.213644 0.704289
+0.633611 0.238474 -0.259249 0.688808
+0.616658 0.279404 -0.303744 0.670378
+0.597064 0.319137 -0.346939 0.649076
+0.574913 0.357504 -0.388647 0.624996
+0.5503 0.394339 -0.428692 0.598239
+0.523331 0.429486 -0.466901 0.56892
+0.49412 0.462794 -0.50311 0.537165
+0.462794 0.49412 -0.537165 0.50311
+0.429486 0.523331 -0.56892 0.466901
+0.394339 0.5503 -0.598239 0.428692
+0.357504 0.574913 -0.624996 0.388647
+0.319137 0.597064 -0.649077 0.346939
+0.279404 0.616658 -0.670378 0.303744
+0.238474 0.633611 -0.688808 0.259249
+0.196524 0.647852 -0.704289 0.213644
+0.153731 0.659318 -0.716754 0.167124
+0.110281 0.667961 -0.72615 0.119888
+0.0663578 0.673743 -0.732436 0.0721386
+0.0221508 0.676641 -0.735586 0.0240805
+-0.022151 0.676641 -0.735586 -0.0240807
+-0.066358 0.673743 -0.732436 -0.0721387
+-0.110281 0.667961 -0.72615 -0.119888
+-0.153731 0.659318 -0.716754 -0.167124
+-0.196524 0.647852 -0.704289 -0.213644
+-0.238475 0.633611 -0.688808 -0.259249
+-0.279404 0.616658 -0.670378 -0.303744
+-0.319137 0.597064 -0.649077 -0.346939
+-0.357504 0.574913 -0.624996 -0.388647
+-0.394339 0.5503 -0.598239 -0.428692
+-0.429486 0.523331 -0.56892 -0.466901
+-0.462794 0.49412 -0.537165 -0.50311
+-0.49412 0.462794 -0.50311 -0.537165
+-0.523331 0.429486 -0.466901 -0.56892
+-0.5503 0.394339 -0.428692 -0.598239
+-0.574913 0.357504 -0.388647 -0.624996
+-0.597063 0.319137 -0.346939 -0.649076
+-0.616658 0.279404 -0.303744 -0.670378
+-0.633611 0.238474 -0.259249 -0.688808
+-0.647852 0.196524 -0.213644 -0.704289
+-0.659318 0.153731 -0.167124 -0.716754
+-0.667961 0.110281 -0.119888 -0.72615
+-0.673743 0.0663579 -0.0721386 -0.732436
+-0.676641 0.022151 -0.0240807 -0.735586
+0.612045 0.0200363 -0.326274 0.720101
+0.609424 0.060023 -0.372672 0.69722
+0.604193 0.0997527 -0.417474 0.671353
+0.596375 0.139055 -0.460489 0.642612
+0.586004 0.177762 -0.501532 0.611118
+0.573123 0.215708 -0.540427 0.577008
+0.557788 0.25273 -0.577008 0.540427
+0.540064 0.28867 -0.611118 0.501532
+0.520028 0.323374 -0.642612 0.460489
+0.497765 0.356693 -0.671353 0.417474
+0.47337 0.388485 -0.69722 0.372672
+0.446949 0.418613 -0.720101 0.326274
+0.418613 0.446949 -0.739899 0.278478
+0.388485 0.47337 -0.756528 0.22949
+0.356693 0.497765 -0.769917 0.17952
+0.323374 0.520028 -0.78001 0.12878
+0.28867 0.540064 -0.786763 0.0774893
+0.25273 0.557788 -0.790146 0.0258667
+0.215708 0.573123 -0.790146 -0.0258668
+0.177762 0.586004 -0.786763 -0.0774894
+0.139055 0.596375 -0.78001 -0.12878
+0.0997526 0.604193 -0.769917 -0.17952
+0.0600229 0.609424 -0.756528 -0.22949
+0.0200362 0.612045 -0.739899 -0.278478
+-0.0200363 0.612045 -0.720101 -0.326274
+-0.060023 0.609424 -0.69722 -0.372672
+-0.0997527 0.604193 -0.671353 -0.417474
+-0.139055 0.596375 -0.642612 -0.460489
+-0.177762 0.586004 -0.611118 -0.501532
+-0.215708 0.573123 -0.577008 -0.540427
+-0.25273 0.557788 -0.540427 -0.577008
+-0.28867 0.540064 -0.501532 -0.611118
+-0.323374 0.520028 -0.460489 -0.642612
+-0.356693 0.497765 -0.417474 -0.671353
+-0.388485 0.47337 -0.372672 -0.69722
+-0.418613 0.446949 -0.326274 -0.720101
+-0.446949 0.418613 -0.278478 -0.739899
+-0.47337 0.388485 -0.22949 -0.756528
+-0.497765 0.356693 -0.17952 -0.769917
+-0.520028 0.323374 -0.12878 -0.78001
+-0.540064 0.28867 -0.0774894 -0.786763
+-0.557788 0.25273 -0.0258667 -0.790146
+-0.573123 0.215708 0.0258668 -0.790146
+-0.586004 0.177762 0.0774893 -0.786763
+-0.596375 0.139055 0.12878 -0.78001
+-0.604193 0.0997527 0.17952 -0.769917
+-0.609424 0.060023 0.22949 -0.756528
+-0.612045 0.0200363 0.278478 -0.739899
+0.645152 0.0211201 -0.381881 0.661438
+0.642389 0.0632698 -0.424324 0.635045
+0.636876 0.105149 -0.464949 0.605934
+0.628635 0.146577 -0.503584 0.574227
+0.617702 0.187378 -0.540062 0.540062
+0.604125 0.227376 -0.574227 0.503584
+0.58796 0.266401 -0.605934 0.464949
+0.569278 0.304285 -0.635045 0.424324
+0.548158 0.340866 -0.661438 0.381881
+0.52469 0.375988 -0.684998 0.337804
+0.498976 0.409499 -0.705625 0.292279
+0.471125 0.441257 -0.72323 0.245503
+0.441257 0.471125 -0.737738 0.197676
+0.409499 0.498976 -0.749087 0.149003
+0.375988 0.52469 -0.757229 0.099691
+0.340866 0.548158 -0.762127 0.0499524
+0.304285 0.569278 -0.763763 -6.27856e-08
+0.266401 0.58796 -0.762127 -0.0499525
+0.227376 0.604125 -0.757229 -0.0996911
+0.187378 0.617702 -0.749087 -0.149003
+0.146577 0.628635 -0.737738 -0.197676
+0.105148 0.636876 -0.72323 -0.245504
+0.0632697 0.642389 -0.705625 -0.292279
+0.02112 0.645152 -0.684998 -0.337804
+-0.0211201 0.645152 -0.661438 -0.381881
+-0.0632698 0.642389 -0.635045 -0.424324
+-0.105149 0.636876 -0.605934 -0.464949
+-0.146577 0.628635 -0.574227 -0.503584
+-0.187378 0.617702 -0.540062 -0.540062
+-0.227377 0.604125 -0.503584 -0.574227
+-0.266401 0.58796 -0.464949 -0.605934
+-0.304285 0.569278 -0.424324 -0.635045
+-0.340866 0.548158 -0.381881 -0.661438
+-0.375988 0.52469 -0.337803 -0.684998
+-0.409499 0.498976 -0.292279 -0.705625
+-0.441257 0.471125 -0.245503 -0.72323
+-0.471125 0.441257 -0.197676 -0.737738
+-0.498976 0.409499 -0.149003 -0.749087
+-0.52469 0.375988 -0.0996911 -0.757229
+-0.548158 0.340866 -0.0499524 -0.762127
+-0.569278 0.304285 -8.59245e-08 -0.763763
+-0.58796 0.266401 0.0499525 -0.762127
+-0.604125 0.227376 0.0996911 -0.757229
+-0.617702 0.187378 0.149003 -0.749087
+-0.628635 0.146577 0.197676 -0.737738
+-0.636876 0.105149 0.245503 -0.72323
+-0.642389 0.0632698 0.292279 -0.705625
+-0.645152 0.0211201 0.337804 -0.684998
+0.645152 0.0211201 -0.245503 0.72323
+0.642389 0.0632698 -0.292279 0.705625
+0.636876 0.105149 -0.337804 0.684998
+0.628635 0.146577 -0.381881 0.661438
+0.617702 0.187378 -0.424324 0.635045
+0.604125 0.227376 -0.464949 0.605934
+0.58796 0.266401 -0.503584 0.574227
+0.569278 0.304285 -0.540062 0.540062
+0.548158 0.340866 -0.574227 0.503584
+0.52469 0.375988 -0.605934 0.464949
+0.498976 0.409499 -0.635045 0.424324
+0.471125 0.441257 -0.661438 0.381881
+0.441257 0.471125 -0.684998 0.337804
+0.409499 0.498976 -0.705625 0.292279
+0.375988 0.52469 -0.72323 0.245503
+0.340866 0.548158 -0.737738 0.197676
+0.304285 0.569278 -0.749087 0.149003
+0.266401 0.58796 -0.757229 0.099691
+0.227376 0.604125 -0.762127 0.0499524
+0.187378 0.617702 -0.763763 -3.28159e-08
+0.146577 0.628635 -0.762127 -0.0499525
+0.105148 0.636876 -0.757229 -0.0996911
+0.0632697 0.642389 -0.749087 -0.149003
+0.02112 0.645152 -0.737738 -0.197676
+-0.0211201 0.645152 -0.72323 -0.245504
+-0.0632698 0.642389 -0.705625 -0.292279
+-0.105149 0.636876 -0.684998 -0.337804
+-0.146577 0.628635 -0.661438 -0.381881
+-0.187378 0.617702 -0.635045 -0.424324
+-0.227377 0.604125 -0.605934 -0.464949
+-0.266401 0.58796 -0.574227 -0.503584
+-0.304285 0.569278 -0.540062 -0.540062
+-0.340866 0.548158 -0.503584 -0.574227
+-0.375988 0.52469 -0.464949 -0.605934
+-0.409499 0.498976 -0.424324 -0.635045
+-0.441257 0.471125 -0.381881 -0.661438
+-0.471125 0.441257 -0.337804 -0.684998
+-0.498976 0.409499 -0.292279 -0.705625
+-0.52469 0.375988 -0.245504 -0.72323
+-0.548158 0.340866 -0.197676 -0.737738
+-0.569278 0.304285 -0.149003 -0.749087
+-0.58796 0.266401 -0.099691 -0.757229
+-0.604125 0.227376 -0.0499524 -0.762127
+-0.617702 0.187378 -2.48466e-08 -0.763763
+-0.628635 0.146577 0.0499525 -0.762127
+-0.636876 0.105149 0.099691 -0.757229
+-0.642389 0.0632698 0.149003 -0.749087
+-0.645152 0.0211201 0.197676 -0.737738
+0.676641 0.0221509 -0.303744 0.670378
+0.673743 0.0663579 -0.346939 0.649076
+0.667961 0.110281 -0.388647 0.624996
+0.659318 0.153731 -0.428692 0.598239
+0.647852 0.196524 -0.466901 0.56892
+0.633611 0.238474 -0.50311 0.537165
+0.616658 0.279404 -0.537165 0.50311
+0.597064 0.319137 -0.56892 0.466901
+0.574913 0.357504 -0.598239 0.428692
+0.5503 0.394339 -0.624996 0.388647
+0.523331 0.429486 -0.649076 0.346939
+0.49412 0.462794 -0.670378 0.303744
+0.462794 0.49412 -0.688808 0.259249
+0.429486 0.523331 -0.704289 0.213644
+0.394339 0.5503 -0.716754 0.167124
+0.357504 0.574913 -0.72615 0.119888
+0.319137 0.597064 -0.732436 0.0721386
+0.279404 0.616658 -0.735586 0.0240805
+0.238474 0.633611 -0.735586 -0.0240806
+0.196524 0.647852 -0.732436 -0.0721387
+0.153731 0.659318 -0.72615 -0.119888
+0.110281 0.667961 -0.716754 -0.167124
+0.0663578 0.673743 -0.704289 -0.213644
+0.0221508 0.676641 -0.688808 -0.259249
+-0.022151 0.676641 -0.670378 -0.303744
+-0.066358 0.673743 -0.649076 -0.346939
+-0.110281 0.667961 -0.624996 -0.388647
+-0.153731 0.659318 -0.598239 -0.428692
+-0.196524 0.647852 -0.56892 -0.466901
+-0.238475 0.633611 -0.537165 -0.50311
+-0.279404 0.616658 -0.50311 -0.537165
+-0.319137 0.597064 -0.466901 -0.56892
+-0.357504 0.574913 -0.428692 -0.598239
+-0.394339 0.5503 -0.388647 -0.624996
+-0.429486 0.523331 -0.346939 -0.649076
+-0.462794 0.49412 -0.303744 -0.670378
+-0.49412 0.462794 -0.259249 -0.688808
+-0.523331 0.429486 -0.213644 -0.704289
+-0.5503 0.394339 -0.167124 -0.716754
+-0.574913 0.357504 -0.119888 -0.72615
+-0.597063 0.319137 -0.0721387 -0.732436
+-0.616658 0.279404 -0.0240806 -0.735586
+-0.633611 0.238474 0.0240807 -0.735586
+-0.647852 0.196524 0.0721386 -0.732436
+-0.659318 0.153731 0.119888 -0.72615
+-0.667961 0.110281 0.167124 -0.716754
+-0.673743 0.0663579 0.213644 -0.704289
+-0.676641 0.022151 0.259249 -0.688808
+0.676641 0.0221509 -0.428692 0.598239
+0.673743 0.0663579 -0.466901 0.56892
+0.667961 0.110281 -0.50311 0.537165
+0.659318 0.153731 -0.537165 0.50311
+0.647852 0.196524 -0.56892 0.466901
+0.633611 0.238474 -0.598239 0.428692
+0.616658 0.279404 -0.624996 0.388647
+0.597064 0.319137 -0.649076 0.346939
+0.574913 0.357504 -0.670378 0.303744
+0.5503 0.394339 -0.688808 0.259249
+0.523331 0.429486 -0.704289 0.213644
+0.49412 0.462794 -0.716754 0.167124
+0.462794 0.49412 -0.72615 0.119888
+0.429486 0.523331 -0.732436 0.0721387
+0.394339 0.5503 -0.735586 0.0240806
+0.357504 0.574913 -0.735586 -0.0240807
+0.319137 0.597064 -0.732436 -0.0721387
+0.279404 0.616658 -0.72615 -0.119888
+0.238474 0.633611 -0.716754 -0.167124
+0.196524 0.647852 -0.704289 -0.213644
+0.153731 0.659318 -0.688808 -0.259249
+0.110281 0.667961 -0.670378 -0.303744
+0.0663578 0.673743 -0.649076 -0.346939
+0.0221508 0.676641 -0.624996 -0.388647
+-0.022151 0.676641 -0.598239 -0.428692
+-0.066358 0.673743 -0.56892 -0.466901
+-0.110281 0.667961 -0.537165 -0.50311
+-0.153731 0.659318 -0.50311 -0.537165
+-0.196524 0.647852 -0.466901 -0.56892
+-0.238475 0.633611 -0.428692 -0.598239
+-0.279404 0.616658 -0.388647 -0.624996
+-0.319137 0.597064 -0.346939 -0.649076
+-0.357504 0.574913 -0.303744 -0.670378
+-0.394339 0.5503 -0.259249 -0.688808
+-0.429486 0.523331 -0.213644 -0.704289
+-0.462794 0.49412 -0.167124 -0.716754
+-0.49412 0.462794 -0.119888 -0.72615
+-0.523331 0.429486 -0.0721386 -0.732436
+-0.5503 0.394339 -0.0240807 -0.735586
+-0.574913 0.357504 0.0240806 -0.735586
+-0.597063 0.319137 0.0721386 -0.732436
+-0.616658 0.279404 0.119888 -0.72615
+-0.633611 0.238474 0.167124 -0.716754
+-0.647852 0.196524 0.213644 -0.704289
+-0.659318 0.153731 0.259249 -0.688808
+-0.667961 0.110281 0.303744 -0.670378
+-0.673743 0.0663579 0.346939 -0.649076
+-0.676641 0.022151 0.388647 -0.624996
+0.706728 0.0231359 -0.466228 0.531631
+0.703702 0.0693086 -0.5 0.5
+0.697662 0.115184 -0.531631 0.466228
+0.688635 0.160567 -0.560986 0.430459
+0.676659 0.205262 -0.587938 0.392847
+0.661785 0.249078 -0.612372 0.353553
+0.644078 0.291828 -0.634185 0.312745
+0.623612 0.333328 -0.653281 0.270598
+0.600477 0.3734 -0.669581 0.227292
+0.574769 0.411874 -0.683013 0.183013
+0.546601 0.448584 -0.69352 0.13795
+0.516092 0.483373 -0.701057 0.0922959
+0.483373 0.516092 -0.705593 0.046247
+0.448584 0.546601 -0.707107 -1.58103e-09
+0.411874 0.574769 -0.705593 -0.046247
+0.3734 0.600477 -0.701057 -0.092296
+0.333328 0.623613 -0.69352 -0.13795
+0.291828 0.644078 -0.683013 -0.183013
+0.249078 0.661785 -0.669581 -0.227292
+0.205262 0.676659 -0.653281 -0.270598
+0.160567 0.688635 -0.634185 -0.312745
+0.115184 0.697662 -0.612372 -0.353553
+0.0693085 0.703702 -0.587938 -0.392848
+0.0231358 0.706728 -0.560985 -0.430459
+-0.023136 0.706728 -0.531631 -0.466228
+-0.0693086 0.703702 -0.5 -0.5
+-0.115185 0.697662 -0.466228 -0.531631
+-0.160567 0.688635 -0.430459 -0.560986
+-0.205262 0.676659 -0.392847 -0.587938
+-0.249078 0.661785 -0.353553 -0.612372
+-0.291828 0.644078 -0.312745 -0.634185
+-0.333328 0.623613 -0.270598 -0.653281
+-0.3734 0.600477 -0.227292 -0.669581
+-0.411874 0.574769 -0.183013 -0.683013
+-0.448584 0.546601 -0.13795 -0.69352
+-0.483373 0.516092 -0.0922959 -0.701057
+-0.516092 0.483373 -0.046247 -0.705593
+-0.546601 0.448584 3.24897e-08 -0.707107
+-0.574769 0.411874 0.0462469 -0.705593
+-0.600477 0.3734 0.092296 -0.701057
+-0.623612 0.333328 0.13795 -0.69352
+-0.644078 0.291828 0.183013 -0.683013
+-0.661785 0.249078 0.227292 -0.669581
+-0.676659 0.205262 0.270598 -0.653281
+-0.688635 0.160567 0.312745 -0.634185
+-0.697662 0.115185 0.353553 -0.612372
+-0.703702 0.0693086 0.392848 -0.587938
+-0.706728 0.0231359 0.430459 -0.560986
+0.706728 0.0231359 -0.353553 0.612372
+0.703702 0.0693086 -0.392847 0.587938
+0.697662 0.115184 -0.430459 0.560986
+0.688635 0.160567 -0.466228 0.531631
+0.676659 0.205262 -0.5 0.5
+0.661785 0.249078 -0.531631 0.466228
+0.644078 0.291828 -0.560986 0.430459
+0.623612 0.333328 -0.587938 0.392847
+0.600477 0.3734 -0.612372 0.353553
+0.574769 0.411874 -0.634185 0.312745
+0.546601 0.448584 -0.653281 0.270598
+0.516092 0.483373 -0.669581 0.227292
+0.483373 0.516092 -0.683013 0.183013
+0.448584 0.546601 -0.69352 0.13795
+0.411874 0.574769 -0.701057 0.0922959
+0.3734 0.600477 -0.705593 0.0462469
+0.333328 0.623613 -0.707107 -5.81282e-08
+0.291828 0.644078 -0.705593 -0.046247
+0.249078 0.661785 -0.701057 -0.092296
+0.205262 0.676659 -0.69352 -0.13795
+0.160567 0.688635 -0.683013 -0.183013
+0.115184 0.697662 -0.669581 -0.227292
+0.0693085 0.703702 -0.653281 -0.270598
+0.0231358 0.706728 -0.634185 -0.312745
+-0.023136 0.706728 -0.612372 -0.353553
+-0.0693086 0.703702 -0.587938 -0.392848
+-0.115185 0.697662 -0.560985 -0.430459
+-0.160567 0.688635 -0.531631 -0.466228
+-0.205262 0.676659 -0.5 -0.5
+-0.249078 0.661785 -0.466228 -0.531631
+-0.291828 0.644078 -0.430459 -0.560986
+-0.333328 0.623613 -0.392848 -0.587938
+-0.3734 0.600477 -0.353553 -0.612372
+-0.411874 0.574769 -0.312745 -0.634185
+-0.448584 0.546601 -0.270598 -0.653281
+-0.483373 0.516092 -0.227292 -0.669581
+-0.516092 0.483373 -0.183013 -0.683013
+-0.546601 0.448584 -0.13795 -0.69352
+-0.574769 0.411874 -0.092296 -0.701057
+-0.600477 0.3734 -0.046247 -0.705593
+-0.623612 0.333328 -7.95506e-08 -0.707107
+-0.644078 0.291828 0.046247 -0.705593
+-0.661785 0.249078 0.092296 -0.701057
+-0.676659 0.205262 0.13795 -0.69352
+-0.688635 0.160567 0.183013 -0.683013
+-0.697662 0.115185 0.227292 -0.669581
+-0.703702 0.0693086 0.270598 -0.653281
+-0.706728 0.0231359 0.312745 -0.634185
+0.735586 0.0240806 -0.394339 0.5503
+0.732436 0.0721387 -0.429486 0.523331
+0.72615 0.119888 -0.462794 0.49412
+0.716754 0.167124 -0.49412 0.462794
+0.704289 0.213644 -0.523331 0.429486
+0.688808 0.259249 -0.5503 0.394339
+0.670378 0.303744 -0.574913 0.357504
+0.649076 0.346939 -0.597064 0.319137
+0.624996 0.388647 -0.616658 0.279404
+0.598239 0.428692 -0.633611 0.238474
+0.56892 0.466901 -0.647852 0.196524
+0.537165 0.50311 -0.659318 0.153731
+0.50311 0.537165 -0.667961 0.110281
+0.466901 0.56892 -0.673743 0.0663579
+0.428692 0.598239 -0.676641 0.0221509
+0.388647 0.624996 -0.676641 -0.022151
+0.346939 0.649077 -0.673743 -0.066358
+0.303744 0.670378 -0.667961 -0.110281
+0.259249 0.688808 -0.659318 -0.153731
+0.213644 0.704289 -0.647852 -0.196524
+0.167124 0.716754 -0.633611 -0.238474
+0.119888 0.72615 -0.616658 -0.279404
+0.0721386 0.732436 -0.597063 -0.319137
+0.0240805 0.735586 -0.574913 -0.357504
+-0.0240807 0.735586 -0.5503 -0.394339
+-0.0721387 0.732436 -0.523331 -0.429486
+-0.119888 0.72615 -0.49412 -0.462794
+-0.167124 0.716754 -0.462794 -0.49412
+-0.213644 0.704289 -0.429486 -0.523331
+-0.259249 0.688808 -0.394339 -0.5503
+-0.303744 0.670378 -0.357504 -0.574913
+-0.346939 0.649077 -0.319137 -0.597063
+-0.388647 0.624996 -0.279404 -0.616658
+-0.428692 0.598239 -0.238474 -0.633611
+-0.466901 0.56892 -0.196524 -0.647852
+-0.50311 0.537165 -0.153731 -0.659318
+-0.537165 0.50311 -0.110281 -0.667961
+-0.56892 0.466901 -0.0663579 -0.673743
+-0.598239 0.428692 -0.022151 -0.676641
+-0.624996 0.388647 0.0221509 -0.676641
+-0.649076 0.346939 0.0663578 -0.673743
+-0.670378 0.303744 0.110281 -0.667961
+-0.688808 0.259249 0.153731 -0.659318
+-0.704289 0.213644 0.196524 -0.647852
+-0.716754 0.167124 0.238474 -0.633611
+-0.72615 0.119888 0.279404 -0.616658
+-0.732436 0.0721386 0.319137 -0.597064
+-0.735586 0.0240807 0.357504 -0.574913
+0.676641 0.0221509 -0.167124 0.716754
+0.673743 0.0663579 -0.213644 0.704289
+0.667961 0.110281 -0.259249 0.688808
+0.659318 0.153731 -0.303744 0.670378
+0.647852 0.196524 -0.346939 0.649076
+0.633611 0.238474 -0.388647 0.624996
+0.616658 0.279404 -0.428692 0.598239
+0.597064 0.319137 -0.466901 0.56892
+0.574913 0.357504 -0.50311 0.537165
+0.5503 0.394339 -0.537165 0.50311
+0.523331 0.429486 -0.56892 0.466901
+0.49412 0.462794 -0.598239 0.428692
+0.462794 0.49412 -0.624996 0.388647
+0.429486 0.523331 -0.649076 0.346939
+0.394339 0.5503 -0.670378 0.303744
+0.357504 0.574913 -0.688808 0.259249
+0.319137 0.597064 -0.704289 0.213644
+0.279404 0.616658 -0.716754 0.167124
+0.238474 0.633611 -0.72615 0.119888
+0.196524 0.647852 -0.732436 0.0721386
+0.153731 0.659318 -0.735586 0.0240806
+0.110281 0.667961 -0.735586 -0.0240807
+0.0663578 0.673743 -0.732436 -0.0721388
+0.0221508 0.676641 -0.72615 -0.119888
+-0.022151 0.676641 -0.716754 -0.167124
+-0.066358 0.673743 -0.704289 -0.213644
+-0.110281 0.667961 -0.688808 -0.259249
+-0.153731 0.659318 -0.670378 -0.303744
+-0.196524 0.647852 -0.649076 -0.346939
+-0.238475 0.633611 -0.624996 -0.388648
+-0.279404 0.616658 -0.598239 -0.428692
+-0.319137 0.597064 -0.56892 -0.466901
+-0.357504 0.574913 -0.537165 -0.50311
+-0.394339 0.5503 -0.50311 -0.537165
+-0.429486 0.523331 -0.466901 -0.56892
+-0.462794 0.49412 -0.428692 -0.598239
+-0.49412 0.462794 -0.388647 -0.624996
+-0.523331 0.429486 -0.346939 -0.649076
+-0.5503 0.394339 -0.303744 -0.670378
+-0.574913 0.357504 -0.259249 -0.688808
+-0.597063 0.319137 -0.213644 -0.704289
+-0.616658 0.279404 -0.167124 -0.716754
+-0.633611 0.238474 -0.119888 -0.72615
+-0.647852 0.196524 -0.0721387 -0.732436
+-0.659318 0.153731 -0.0240805 -0.735586
+-0.667961 0.110281 0.0240805 -0.735586
+-0.673743 0.0663579 0.0721387 -0.732436
+-0.676641 0.022151 0.119888 -0.72615
+0.706728 0.0231359 -0.227292 0.669581
+0.703702 0.0693086 -0.270598 0.653281
+0.697662 0.115184 -0.312745 0.634185
+0.688635 0.160567 -0.353553 0.612372
+0.676659 0.205262 -0.392847 0.587938
+0.661785 0.249078 -0.430459 0.560986
+0.644078 0.291828 -0.466228 0.531631
+0.623612 0.333328 -0.5 0.5
+0.600477 0.3734 -0.531631 0.466228
+0.574769 0.411874 -0.560986 0.430459
+0.546601 0.448584 -0.587938 0.392847
+0.516092 0.483373 -0.612372 0.353553
+0.483373 0.516092 -0.634185 0.312745
+0.448584 0.546601 -0.653281 0.270598
+0.411874 0.574769 -0.669581 0.227292
+0.3734 0.600477 -0.683013 0.183013
+0.333328 0.623613 -0.69352 0.13795
+0.291828 0.644078 -0.701057 0.0922959
+0.249078 0.661785 -0.705593 0.046247
+0.205262 0.676659 -0.707107 -3.03816e-08
+0.160567 0.688635 -0.705593 -0.046247
+0.115184 0.697662 -0.701057 -0.0922961
+0.0693085 0.703702 -0.69352 -0.13795
+0.0231358 0.706728 -0.683013 -0.183013
+-0.023136 0.706728 -0.669581 -0.227292
+-0.0693086 0.703702 -0.653281 -0.270598
+-0.115185 0.697662 -0.634185 -0.312745
+-0.160567 0.688635 -0.612372 -0.353553
+-0.205262 0.676659 -0.587938 -0.392848
+-0.249078 0.661785 -0.560985 -0.430459
+-0.291828 0.644078 -0.531631 -0.466228
+-0.333328 0.623613 -0.5 -0.5
+-0.3734 0.600477 -0.466228 -0.531631
+-0.411874 0.574769 -0.430459 -0.560986
+-0.448584 0.546601 -0.392847 -0.587938
+-0.483373 0.516092 -0.353553 -0.612372
+-0.516092 0.483373 -0.312745 -0.634185
+-0.546601 0.448584 -0.270598 -0.653281
+-0.574769 0.411874 -0.227292 -0.669581
+-0.600477 0.3734 -0.183013 -0.683013
+-0.623612 0.333328 -0.13795 -0.69352
+-0.644078 0.291828 -0.092296 -0.701057
+-0.661785 0.249078 -0.0462469 -0.705593
+-0.676659 0.205262 -2.30035e-08 -0.707107
+-0.688635 0.160567 0.046247 -0.705593
+-0.697662 0.115185 0.0922959 -0.701057
+-0.703702 0.0693086 0.13795 -0.69352
+-0.706728 0.0231359 0.183013 -0.683013
+0.706728 0.0231359 -0.092296 0.701057
+0.703702 0.0693086 -0.13795 0.69352
+0.697662 0.115184 -0.183013 0.683013
+0.688635 0.160567 -0.227292 0.669581
+0.676659 0.205262 -0.270598 0.653281
+0.661785 0.249078 -0.312745 0.634185
+0.644078 0.291828 -0.353553 0.612372
+0.623612 0.333328 -0.392847 0.587938
+0.600477 0.3734 -0.430459 0.560986
+0.574769 0.411874 -0.466228 0.531631
+0.546601 0.448584 -0.5 0.5
+0.516092 0.483373 -0.531631 0.466228
+0.483373 0.516092 -0.560986 0.430459
+0.448584 0.546601 -0.587938 0.392847
+0.411874 0.574769 -0.612372 0.353553
+0.3734 0.600477 -0.634185 0.312745
+0.333328 0.623613 -0.653282 0.270598
+0.291828 0.644078 -0.669581 0.227292
+0.249078 0.661785 -0.683013 0.183013
+0.205262 0.676659 -0.69352 0.13795
+0.160567 0.688635 -0.701057 0.0922959
+0.115184 0.697662 -0.705593 0.0462469
+0.0693085 0.703702 -0.707107 -8.69287e-08
+0.0231358 0.706728 -0.705593 -0.0462471
+-0.023136 0.706728 -0.701057 -0.092296
+-0.0693086 0.703702 -0.69352 -0.13795
+-0.115185 0.697662 -0.683013 -0.183013
+-0.160567 0.688635 -0.669581 -0.227292
+-0.205262 0.676659 -0.653281 -0.270598
+-0.249078 0.661785 -0.634185 -0.312745
+-0.291828 0.644078 -0.612372 -0.353553
+-0.333328 0.623613 -0.587938 -0.392847
+-0.3734 0.600477 -0.560986 -0.430459
+-0.411874 0.574769 -0.531631 -0.466228
+-0.448584 0.546601 -0.5 -0.5
+-0.483373 0.516092 -0.466228 -0.531631
+-0.516092 0.483373 -0.430459 -0.560986
+-0.546601 0.448584 -0.392847 -0.587938
+-0.574769 0.411874 -0.353553 -0.612372
+-0.600477 0.3734 -0.312745 -0.634185
+-0.623612 0.333328 -0.270598 -0.653281
+-0.644078 0.291828 -0.227292 -0.669581
+-0.661785 0.249078 -0.183013 -0.683013
+-0.676659 0.205262 -0.13795 -0.69352
+-0.688635 0.160567 -0.0922959 -0.701057
+-0.697662 0.115185 -0.046247 -0.705593
+-0.703702 0.0693086 3.35437e-08 -0.707107
+-0.706728 0.0231359 0.0462469 -0.705593
+0.735586 0.0240806 -0.153731 0.659318
+0.732436 0.0721387 -0.196524 0.647852
+0.72615 0.119888 -0.238474 0.633611
+0.716754 0.167124 -0.279404 0.616658
+0.704289 0.213644 -0.319137 0.597064
+0.688808 0.259249 -0.357504 0.574913
+0.670378 0.303744 -0.394339 0.5503
+0.649076 0.346939 -0.429486 0.523331
+0.624996 0.388647 -0.462794 0.49412
+0.598239 0.428692 -0.49412 0.462794
+0.56892 0.466901 -0.523331 0.429486
+0.537165 0.50311 -0.5503 0.394339
+0.50311 0.537165 -0.574913 0.357504
+0.466901 0.56892 -0.597064 0.319137
+0.428692 0.598239 -0.616658 0.279404
+0.388647 0.624996 -0.633611 0.238474
+0.346939 0.649077 -0.647852 0.196524
+0.303744 0.670378 -0.659318 0.153731
+0.259249 0.688808 -0.667961 0.110281
+0.213644 0.704289 -0.673743 0.0663579
+0.167124 0.716754 -0.676641 0.0221509
+0.119888 0.72615 -0.676641 -0.022151
+0.0721386 0.732436 -0.673743 -0.066358
+0.0240805 0.735586 -0.667961 -0.110281
+-0.0240807 0.735586 -0.659318 -0.153731
+-0.0721387 0.732436 -0.647852 -0.196524
+-0.119888 0.72615 -0.633611 -0.238474
+-0.167124 0.716754 -0.616658 -0.279404
+-0.213644 0.704289 -0.597063 -0.319137
+-0.259249 0.688808 -0.574913 -0.357504
+-0.303744 0.670378 -0.5503 -0.394339
+-0.346939 0.649077 -0.523331 -0.429486
+-0.388647 0.624996 -0.49412 -0.462794
+-0.428692 0.598239 -0.462794 -0.49412
+-0.466901 0.56892 -0.429486 -0.523331
+-0.50311 0.537165 -0.394339 -0.5503
+-0.537165 0.50311 -0.357504 -0.574913
+-0.56892 0.466901 -0.319137 -0.597064
+-0.598239 0.428692 -0.279404 -0.616658
+-0.624996 0.388647 -0.238474 -0.633611
+-0.649076 0.346939 -0.196524 -0.647852
+-0.670378 0.303744 -0.153731 -0.659318
+-0.688808 0.259249 -0.110281 -0.667961
+-0.704289 0.213644 -0.0663579 -0.673743
+-0.716754 0.167124 -0.0221509 -0.676641
+-0.72615 0.119888 0.0221509 -0.676641
+-0.732436 0.0721386 0.0663579 -0.673743
+-0.735586 0.0240807 0.110281 -0.667961
+0.735586 0.0240806 -0.279404 0.616658
+0.732436 0.0721387 -0.319137 0.597064
+0.72615 0.119888 -0.357504 0.574913
+0.716754 0.167124 -0.394339 0.5503
+0.704289 0.213644 -0.429486 0.523331
+0.688808 0.259249 -0.462794 0.49412
+0.670378 0.303744 -0.49412 0.462794
+0.649076 0.346939 -0.523331 0.429486
+0.624996 0.388647 -0.5503 0.394339
+0.598239 0.428692 -0.574913 0.357504
+0.56892 0.466901 -0.597064 0.319137
+0.537165 0.50311 -0.616658 0.279404
+0.50311 0.537165 -0.633611 0.238474
+0.466901 0.56892 -0.647852 0.196524
+0.428692 0.598239 -0.659318 0.153731
+0.388647 0.624996 -0.667961 0.110281
+0.346939 0.649077 -0.673743 0.0663579
+0.303744 0.670378 -0.676641 0.0221509
+0.259249 0.688808 -0.676641 -0.022151
+0.213644 0.704289 -0.673743 -0.0663579
+0.167124 0.716754 -0.667961 -0.110281
+0.119888 0.72615 -0.659318 -0.153731
+0.0721386 0.732436 -0.647852 -0.196524
+0.0240805 0.735586 -0.633611 -0.238474
+-0.0240807 0.735586 -0.616658 -0.279404
+-0.0721387 0.732436 -0.597063 -0.319137
+-0.119888 0.72615 -0.574913 -0.357504
+-0.167124 0.716754 -0.5503 -0.394339
+-0.213644 0.704289 -0.52333 -0.429486
+-0.259249 0.688808 -0.49412 -0.462794
+-0.303744 0.670378 -0.462794 -0.49412
+-0.346939 0.649077 -0.429486 -0.523331
+-0.388647 0.624996 -0.394339 -0.5503
+-0.428692 0.598239 -0.357504 -0.574913
+-0.466901 0.56892 -0.319137 -0.597064
+-0.50311 0.537165 -0.279404 -0.616658
+-0.537165 0.50311 -0.238474 -0.633611
+-0.56892 0.466901 -0.196524 -0.647852
+-0.598239 0.428692 -0.153731 -0.659318
+-0.624996 0.388647 -0.110281 -0.667961
+-0.649076 0.346939 -0.066358 -0.673743
+-0.670378 0.303744 -0.0221509 -0.676641
+-0.688808 0.259249 0.022151 -0.676641
+-0.704289 0.213644 0.0663579 -0.673743
+-0.716754 0.167124 0.110281 -0.667961
+-0.72615 0.119888 0.153731 -0.659318
+-0.732436 0.0721386 0.196524 -0.647852
+-0.735586 0.0240807 0.238474 -0.633611
+0.763354 0.0249896 -0.322749 0.559017
+0.760085 0.0748618 -0.358619 0.536711
+0.753561 0.124413 -0.392954 0.512107
+0.743811 0.173432 -0.425606 0.485311
+0.730875 0.221709 -0.456435 0.456435
+0.71481 0.269035 -0.485311 0.425606
+0.695684 0.31521 -0.512107 0.392954
+0.673578 0.360035 -0.536711 0.358619
+0.648589 0.403318 -0.559017 0.322749
+0.620822 0.444875 -0.578929 0.285496
+0.590396 0.484526 -0.596362 0.247021
+0.557443 0.522102 -0.611241 0.207488
+0.522102 0.557443 -0.623502 0.167067
+0.484526 0.590396 -0.633094 0.12593
+0.444875 0.620822 -0.639975 0.0842543
+0.403318 0.648589 -0.644115 0.0422175
+0.360035 0.673579 -0.645497 -5.30635e-08
+0.31521 0.695684 -0.644115 -0.0422176
+0.269035 0.71481 -0.639975 -0.0842543
+0.221709 0.730875 -0.633094 -0.12593
+0.173432 0.743811 -0.623502 -0.167067
+0.124413 0.753561 -0.611241 -0.207488
+0.0748617 0.760085 -0.596362 -0.247021
+0.0249895 0.763354 -0.578929 -0.285496
+-0.0249897 0.763354 -0.559017 -0.322749
+-0.0748619 0.760085 -0.536711 -0.358619
+-0.124414 0.753561 -0.512107 -0.392954
+-0.173432 0.743811 -0.48531 -0.425606
+-0.221709 0.730875 -0.456435 -0.456436
+-0.269036 0.71481 -0.425606 -0.485311
+-0.31521 0.695684 -0.392954 -0.512107
+-0.360035 0.673579 -0.358619 -0.536711
+-0.403318 0.648589 -0.322749 -0.559017
+-0.444875 0.620822 -0.285496 -0.578929
+-0.484526 0.590397 -0.247021 -0.596362
+-0.522102 0.557443 -0.207488 -0.611241
+-0.557443 0.522102 -0.167067 -0.623502
+-0.590397 0.484526 -0.12593 -0.633094
+-0.620822 0.444875 -0.0842544 -0.639975
+-0.648589 0.403318 -0.0422175 -0.644115
+-0.673578 0.360035 -7.26194e-08 -0.645497
+-0.695684 0.31521 0.0422175 -0.644115
+-0.71481 0.269035 0.0842544 -0.639975
+-0.730875 0.221709 0.12593 -0.633094
+-0.743811 0.173432 0.167067 -0.623502
+-0.753561 0.124414 0.207488 -0.611241
+-0.760085 0.0748618 0.247021 -0.596362
+-0.763354 0.0249897 0.285496 -0.578929
+0.763354 0.0249896 -0.207488 0.611241
+0.760085 0.0748618 -0.247021 0.596362
+0.753561 0.124413 -0.285496 0.578929
+0.743811 0.173432 -0.322749 0.559017
+0.730875 0.221709 -0.358619 0.536711
+0.71481 0.269035 -0.392954 0.512107
+0.695684 0.31521 -0.425606 0.485311
+0.673578 0.360035 -0.456435 0.456435
+0.648589 0.403318 -0.485311 0.425606
+0.620822 0.444875 -0.512107 0.392954
+0.590396 0.484526 -0.536711 0.358619
+0.557443 0.522102 -0.559017 0.322749
+0.522102 0.557443 -0.578929 0.285496
+0.484526 0.590396 -0.596362 0.247021
+0.444875 0.620822 -0.611241 0.207488
+0.403318 0.648589 -0.623502 0.167067
+0.360035 0.673579 -0.633094 0.12593
+0.31521 0.695684 -0.639975 0.0842543
+0.269035 0.71481 -0.644115 0.0422175
+0.221709 0.730875 -0.645497 -2.77345e-08
+0.173432 0.743811 -0.644115 -0.0422176
+0.124413 0.753561 -0.639975 -0.0842544
+0.0748617 0.760085 -0.633094 -0.12593
+0.0249895 0.763354 -0.623502 -0.167067
+-0.0249897 0.763354 -0.611241 -0.207488
+-0.0748619 0.760085 -0.596362 -0.247021
+-0.124414 0.753561 -0.578929 -0.285496
+-0.173432 0.743811 -0.559017 -0.322749
+-0.221709 0.730875 -0.536711 -0.358619
+-0.269036 0.71481 -0.512107 -0.392954
+-0.31521 0.695684 -0.48531 -0.425606
+-0.360035 0.673579 -0.456436 -0.456435
+-0.403318 0.648589 -0.425606 -0.485311
+-0.444875 0.620822 -0.392954 -0.512107
+-0.484526 0.590397 -0.358619 -0.536711
+-0.522102 0.557443 -0.322749 -0.559017
+-0.557443 0.522102 -0.285496 -0.578929
+-0.590397 0.484526 -0.247021 -0.596362
+-0.620822 0.444875 -0.207488 -0.611241
+-0.648589 0.403318 -0.167067 -0.623502
+-0.673578 0.360035 -0.12593 -0.633094
+-0.695684 0.31521 -0.0842543 -0.639975
+-0.71481 0.269035 -0.0422175 -0.644115
+-0.730875 0.221709 -2.09992e-08 -0.645497
+-0.743811 0.173432 0.0422176 -0.644115
+-0.753561 0.124414 0.0842543 -0.639975
+-0.760085 0.0748618 0.12593 -0.633094
+-0.763354 0.0249897 0.167067 -0.623502
+0.790146 0.0258667 -0.25273 0.557788
+0.786763 0.0774894 -0.28867 0.540064
+0.78001 0.12878 -0.323374 0.520028
+0.769917 0.17952 -0.356693 0.497765
+0.756528 0.22949 -0.388485 0.47337
+0.739899 0.278478 -0.418613 0.446949
+0.720101 0.326274 -0.446949 0.418613
+0.69722 0.372672 -0.47337 0.388485
+0.671353 0.417474 -0.497765 0.356693
+0.642612 0.460489 -0.520028 0.323374
+0.611118 0.501532 -0.540064 0.28867
+0.577008 0.540427 -0.557788 0.25273
+0.540427 0.577008 -0.573123 0.215708
+0.501532 0.611118 -0.586004 0.177762
+0.460489 0.642612 -0.596375 0.139055
+0.417474 0.671353 -0.604193 0.0997526
+0.372672 0.69722 -0.609424 0.0600229
+0.326274 0.720101 -0.612045 0.0200362
+0.278478 0.739899 -0.612045 -0.0200363
+0.22949 0.756528 -0.609424 -0.060023
+0.17952 0.769917 -0.604193 -0.0997527
+0.12878 0.78001 -0.596375 -0.139055
+0.0774893 0.786763 -0.586004 -0.177762
+0.0258666 0.790146 -0.573123 -0.215708
+-0.0258668 0.790146 -0.557788 -0.25273
+-0.0774894 0.786763 -0.540064 -0.28867
+-0.12878 0.78001 -0.520028 -0.323374
+-0.17952 0.769917 -0.497765 -0.356693
+-0.22949 0.756528 -0.47337 -0.388485
+-0.278478 0.739899 -0.446949 -0.418613
+-0.326274 0.720101 -0.418613 -0.446949
+-0.372672 0.69722 -0.388485 -0.47337
+-0.417474 0.671353 -0.356693 -0.497765
+-0.460489 0.642612 -0.323374 -0.520028
+-0.501532 0.611118 -0.28867 -0.540064
+-0.540427 0.577008 -0.25273 -0.557788
+-0.577008 0.540427 -0.215708 -0.573123
+-0.611118 0.501532 -0.177762 -0.586004
+-0.642612 0.460489 -0.139055 -0.596375
+-0.671353 0.417474 -0.0997527 -0.604193
+-0.69722 0.372672 -0.0600231 -0.609424
+-0.720101 0.326274 -0.0200363 -0.612045
+-0.739899 0.278478 0.0200363 -0.612045
+-0.756528 0.22949 0.060023 -0.609424
+-0.769917 0.179519 0.0997527 -0.604193
+-0.78001 0.12878 0.139055 -0.596375
+-0.786763 0.0774893 0.177762 -0.586004
+-0.790146 0.0258668 0.215708 -0.573123
+0.612045 0.0200363 0.278478 0.739899
+0.609424 0.060023 0.22949 0.756528
+0.604193 0.0997527 0.17952 0.769917
+0.596375 0.139055 0.12878 0.78001
+0.586004 0.177762 0.0774894 0.786763
+0.573123 0.215708 0.0258667 0.790146
+0.557788 0.25273 -0.0258667 0.790146
+0.540064 0.28867 -0.0774894 0.786763
+0.520028 0.323374 -0.12878 0.78001
+0.497765 0.356693 -0.17952 0.769917
+0.47337 0.388485 -0.22949 0.756528
+0.446949 0.418613 -0.278478 0.739899
+0.418613 0.446949 -0.326274 0.720101
+0.388485 0.47337 -0.372672 0.69722
+0.356693 0.497765 -0.417474 0.671353
+0.323374 0.520028 -0.460489 0.642612
+0.28867 0.540064 -0.501532 0.611118
+0.25273 0.557788 -0.540427 0.577008
+0.215708 0.573123 -0.577008 0.540427
+0.177762 0.586004 -0.611118 0.501532
+0.139055 0.596375 -0.642612 0.460489
+0.0997526 0.604193 -0.671353 0.417474
+0.0600229 0.609424 -0.69722 0.372672
+0.0200362 0.612045 -0.720101 0.326273
+-0.0200363 0.612045 -0.739899 0.278478
+-0.060023 0.609424 -0.756528 0.22949
+-0.0997527 0.604193 -0.769917 0.179519
+-0.139055 0.596375 -0.78001 0.12878
+-0.177762 0.586004 -0.786763 0.0774892
+-0.215708 0.573123 -0.790146 0.0258666
+-0.25273 0.557788 -0.790146 -0.0258668
+-0.28867 0.540064 -0.786763 -0.0774893
+-0.323374 0.520028 -0.78001 -0.12878
+-0.356693 0.497765 -0.769917 -0.17952
+-0.388485 0.47337 -0.756528 -0.22949
+-0.418613 0.446949 -0.739899 -0.278478
+-0.446949 0.418613 -0.720101 -0.326274
+-0.47337 0.388485 -0.69722 -0.372672
+-0.497765 0.356693 -0.671353 -0.417474
+-0.520028 0.323374 -0.642612 -0.460489
+-0.540064 0.28867 -0.611118 -0.501532
+-0.557788 0.25273 -0.577008 -0.540427
+-0.573123 0.215708 -0.540427 -0.577008
+-0.586004 0.177762 -0.501532 -0.611118
+-0.596375 0.139055 -0.460489 -0.642612
+-0.604193 0.0997527 -0.417474 -0.671353
+-0.609424 0.060023 -0.372672 -0.69722
+-0.612045 0.0200363 -0.326274 -0.720101
+0.645152 0.0211201 0.197676 0.737738
+0.642389 0.0632698 0.149003 0.749087
+0.636876 0.105149 0.099691 0.757229
+0.628635 0.146577 0.0499525 0.762127
+0.617702 0.187378 -5.69236e-10 0.763763
+0.604125 0.227376 -0.0499525 0.762127
+0.58796 0.266401 -0.099691 0.757229
+0.569278 0.304285 -0.149003 0.749087
+0.548158 0.340866 -0.197676 0.737738
+0.52469 0.375988 -0.245503 0.72323
+0.498976 0.409499 -0.292279 0.705625
+0.471125 0.441257 -0.337804 0.684998
+0.441257 0.471125 -0.381881 0.661438
+0.409499 0.498976 -0.424324 0.635045
+0.375988 0.52469 -0.464949 0.605934
+0.340866 0.548158 -0.503584 0.574227
+0.304285 0.569278 -0.540062 0.540062
+0.266401 0.58796 -0.574227 0.503584
+0.227376 0.604125 -0.605934 0.464949
+0.187378 0.617702 -0.635045 0.424324
+0.146577 0.628635 -0.661438 0.381881
+0.105148 0.636876 -0.684998 0.337803
+0.0632697 0.642389 -0.705625 0.292279
+0.02112 0.645152 -0.72323 0.245503
+-0.0211201 0.645152 -0.737738 0.197676
+-0.0632698 0.642389 -0.749087 0.149003
+-0.105149 0.636876 -0.757229 0.099691
+-0.146577 0.628635 -0.762127 0.0499524
+-0.187378 0.617702 -0.763763 -1.25002e-07
+-0.227377 0.604125 -0.762127 -0.0499526
+-0.266401 0.58796 -0.757229 -0.0996911
+-0.304285 0.569278 -0.749087 -0.149003
+-0.340866 0.548158 -0.737738 -0.197676
+-0.375988 0.52469 -0.72323 -0.245504
+-0.409499 0.498976 -0.705625 -0.292279
+-0.441257 0.471125 -0.684998 -0.337804
+-0.471125 0.441257 -0.661438 -0.381881
+-0.498976 0.409499 -0.635045 -0.424324
+-0.52469 0.375988 -0.605934 -0.464949
+-0.548158 0.340866 -0.574227 -0.503584
+-0.569278 0.304285 -0.540062 -0.540062
+-0.58796 0.266401 -0.503584 -0.574227
+-0.604125 0.227376 -0.464949 -0.605934
+-0.617702 0.187378 -0.424324 -0.635045
+-0.628635 0.146577 -0.381881 -0.661438
+-0.636876 0.105149 -0.337804 -0.684998
+-0.642389 0.0632698 -0.292279 -0.705625
+-0.645152 0.0211201 -0.245504 -0.72323
+0.645152 0.0211201 0.337804 0.684998
+0.642389 0.0632698 0.292279 0.705625
+0.636876 0.105149 0.245503 0.72323
+0.628635 0.146577 0.197676 0.737738
+0.617702 0.187378 0.149003 0.749087
+0.604125 0.227376 0.099691 0.757229
+0.58796 0.266401 0.0499525 0.762127
+0.569278 0.304285 -1.61233e-08 0.763763
+0.548158 0.340866 -0.0499525 0.762127
+0.52469 0.375988 -0.099691 0.757229
+0.498976 0.409499 -0.149003 0.749087
+0.471125 0.441257 -0.197676 0.737738
+0.441257 0.471125 -0.245503 0.72323
+0.409499 0.498976 -0.292279 0.705625
+0.375988 0.52469 -0.337804 0.684998
+0.340866 0.548158 -0.381881 0.661438
+0.304285 0.569278 -0.424324 0.635045
+0.266401 0.58796 -0.464949 0.605934
+0.227376 0.604125 -0.503584 0.574227
+0.187378 0.617702 -0.540062 0.540062
+0.146577 0.628635 -0.574227 0.503584
+0.105148 0.636876 -0.605934 0.464949
+0.0632697 0.642389 -0.635045 0.424324
+0.02112 0.645152 -0.661438 0.381881
+-0.0211201 0.645152 -0.684998 0.337804
+-0.0632698 0.642389 -0.705625 0.292279
+-0.105149 0.636876 -0.72323 0.245503
+-0.146577 0.628635 -0.737738 0.197676
+-0.187378 0.617702 -0.749087 0.149003
+-0.227377 0.604125 -0.757229 0.0996909
+-0.266401 0.58796 -0.762127 0.0499524
+-0.304285 0.569278 -0.763763 8.70629e-08
+-0.340866 0.548158 -0.762127 -0.0499525
+-0.375988 0.52469 -0.757229 -0.0996911
+-0.409499 0.498976 -0.749087 -0.149003
+-0.441257 0.471125 -0.737738 -0.197676
+-0.471125 0.441257 -0.72323 -0.245503
+-0.498976 0.409499 -0.705625 -0.292279
+-0.52469 0.375988 -0.684998 -0.337804
+-0.548158 0.340866 -0.661438 -0.381881
+-0.569278 0.304285 -0.635045 -0.424324
+-0.58796 0.266401 -0.605934 -0.464949
+-0.604125 0.227376 -0.574227 -0.503584
+-0.617702 0.187378 -0.540062 -0.540062
+-0.628635 0.146577 -0.503584 -0.574227
+-0.636876 0.105149 -0.464949 -0.605934
+-0.642389 0.0632698 -0.424324 -0.635045
+-0.645152 0.0211201 -0.381881 -0.661438
+0.676641 0.0221509 0.259249 0.688808
+0.673743 0.0663579 0.213644 0.704289
+0.667961 0.110281 0.167124 0.716754
+0.659318 0.153731 0.119888 0.72615
+0.647852 0.196524 0.0721387 0.732436
+0.633611 0.238474 0.0240806 0.735586
+0.616658 0.279404 -0.0240806 0.735586
+0.597064 0.319137 -0.0721387 0.732436
+0.574913 0.357504 -0.119888 0.72615
+0.5503 0.394339 -0.167124 0.716754
+0.523331 0.429486 -0.213644 0.704289
+0.49412 0.462794 -0.259249 0.688808
+0.462794 0.49412 -0.303744 0.670378
+0.429486 0.523331 -0.346939 0.649076
+0.394339 0.5503 -0.388647 0.624996
+0.357504 0.574913 -0.428692 0.598239
+0.319137 0.597064 -0.466901 0.56892
+0.279404 0.616658 -0.50311 0.537165
+0.238474 0.633611 -0.537165 0.50311
+0.196524 0.647852 -0.56892 0.466901
+0.153731 0.659318 -0.598239 0.428692
+0.110281 0.667961 -0.624996 0.388647
+0.0663578 0.673743 -0.649077 0.346939
+0.0221508 0.676641 -0.670378 0.303744
+-0.022151 0.676641 -0.688808 0.259249
+-0.066358 0.673743 -0.704289 0.213644
+-0.110281 0.667961 -0.716754 0.167124
+-0.153731 0.659318 -0.72615 0.119888
+-0.196524 0.647852 -0.732436 0.0721385
+-0.238475 0.633611 -0.735586 0.0240805
+-0.279404 0.616658 -0.735586 -0.0240807
+-0.319137 0.597064 -0.732436 -0.0721386
+-0.357504 0.574913 -0.72615 -0.119888
+-0.394339 0.5503 -0.716754 -0.167124
+-0.429486 0.523331 -0.704289 -0.213644
+-0.462794 0.49412 -0.688808 -0.259249
+-0.49412 0.462794 -0.670378 -0.303744
+-0.523331 0.429486 -0.649076 -0.346939
+-0.5503 0.394339 -0.624996 -0.388647
+-0.574913 0.357504 -0.598239 -0.428692
+-0.597063 0.319137 -0.56892 -0.466901
+-0.616658 0.279404 -0.537165 -0.50311
+-0.633611 0.238474 -0.50311 -0.537165
+-0.647852 0.196524 -0.466901 -0.56892
+-0.659318 0.153731 -0.428692 -0.598239
+-0.667961 0.110281 -0.388647 -0.624996
+-0.673743 0.0663579 -0.346939 -0.649076
+-0.676641 0.022151 -0.303744 -0.670378
+0.676641 0.0221509 0.119888 0.72615
+0.673743 0.0663579 0.0721387 0.732436
+0.667961 0.110281 0.0240806 0.735586
+0.659318 0.153731 -0.0240806 0.735586
+0.647852 0.196524 -0.0721387 0.732436
+0.633611 0.238474 -0.119888 0.72615
+0.616658 0.279404 -0.167124 0.716754
+0.597064 0.319137 -0.213644 0.704289
+0.574913 0.357504 -0.259249 0.688808
+0.5503 0.394339 -0.303744 0.670378
+0.523331 0.429486 -0.346939 0.649076
+0.49412 0.462794 -0.388647 0.624996
+0.462794 0.49412 -0.428692 0.598239
+0.429486 0.523331 -0.466901 0.56892
+0.394339 0.5503 -0.50311 0.537165
+0.357504 0.574913 -0.537165 0.50311
+0.319137 0.597064 -0.56892 0.466901
+0.279404 0.616658 -0.598239 0.428692
+0.238474 0.633611 -0.624996 0.388647
+0.196524 0.647852 -0.649076 0.346939
+0.153731 0.659318 -0.670378 0.303744
+0.110281 0.667961 -0.688808 0.259249
+0.0663578 0.673743 -0.704289 0.213644
+0.0221508 0.676641 -0.716754 0.167124
+-0.022151 0.676641 -0.72615 0.119888
+-0.066358 0.673743 -0.732436 0.0721386
+-0.110281 0.667961 -0.735586 0.0240805
+-0.153731 0.659318 -0.735586 -0.0240806
+-0.196524 0.647852 -0.732436 -0.0721388
+-0.238475 0.633611 -0.72615 -0.119888
+-0.279404 0.616658 -0.716754 -0.167124
+-0.319137 0.597064 -0.704289 -0.213644
+-0.357504 0.574913 -0.688808 -0.259249
+-0.394339 0.5503 -0.670378 -0.303744
+-0.429486 0.523331 -0.649076 -0.346939
+-0.462794 0.49412 -0.624996 -0.388647
+-0.49412 0.462794 -0.598239 -0.428692
+-0.523331 0.429486 -0.56892 -0.466901
+-0.5503 0.394339 -0.537165 -0.50311
+-0.574913 0.357504 -0.50311 -0.537165
+-0.597063 0.319137 -0.466901 -0.56892
+-0.616658 0.279404 -0.428692 -0.598239
+-0.633611 0.238474 -0.388647 -0.624996
+-0.647852 0.196524 -0.346939 -0.649076
+-0.659318 0.153731 -0.303744 -0.670378
+-0.667961 0.110281 -0.259249 -0.688808
+-0.673743 0.0663579 -0.213644 -0.704289
+-0.676641 0.022151 -0.167124 -0.716754
+0.706728 0.0231359 0.046247 0.705593
+0.703702 0.0693086 -1.93179e-09 0.707107
+0.697662 0.115184 -0.046247 0.705593
+0.688635 0.160567 -0.092296 0.701057
+0.676659 0.205262 -0.13795 0.69352
+0.661785 0.249078 -0.183013 0.683013
+0.644078 0.291828 -0.227292 0.669581
+0.623612 0.333328 -0.270598 0.653281
+0.600477 0.3734 -0.312745 0.634185
+0.574769 0.411874 -0.353553 0.612372
+0.546601 0.448584 -0.392847 0.587938
+0.516092 0.483373 -0.430459 0.560986
+0.483373 0.516092 -0.466228 0.531631
+0.448584 0.546601 -0.5 0.5
+0.411874 0.574769 -0.531631 0.466228
+0.3734 0.600477 -0.560986 0.430459
+0.333328 0.623613 -0.587938 0.392847
+0.291828 0.644078 -0.612372 0.353553
+0.249078 0.661785 -0.634185 0.312745
+0.205262 0.676659 -0.653281 0.270598
+0.160567 0.688635 -0.669581 0.227292
+0.115184 0.697662 -0.683013 0.183013
+0.0693085 0.703702 -0.69352 0.13795
+0.0231358 0.706728 -0.701057 0.0922959
+-0.023136 0.706728 -0.705593 0.0462469
+-0.0693086 0.703702 -0.707107 -5.91822e-08
+-0.115185 0.697662 -0.705593 -0.046247
+-0.160567 0.688635 -0.701057 -0.092296
+-0.205262 0.676659 -0.69352 -0.13795
+-0.249078 0.661785 -0.683013 -0.183013
+-0.291828 0.644078 -0.669581 -0.227292
+-0.333328 0.623613 -0.653282 -0.270598
+-0.3734 0.600477 -0.634185 -0.312745
+-0.411874 0.574769 -0.612372 -0.353553
+-0.448584 0.546601 -0.587938 -0.392847
+-0.483373 0.516092 -0.560985 -0.430459
+-0.516092 0.483373 -0.531631 -0.466228
+-0.546601 0.448584 -0.5 -0.5
+-0.574769 0.411874 -0.466228 -0.531631
+-0.600477 0.3734 -0.430459 -0.560986
+-0.623612 0.333328 -0.392848 -0.587938
+-0.644078 0.291828 -0.353553 -0.612372
+-0.661785 0.249078 -0.312745 -0.634185
+-0.676659 0.205262 -0.270598 -0.653281
+-0.688635 0.160567 -0.227292 -0.669581
+-0.697662 0.115185 -0.183013 -0.683013
+-0.703702 0.0693086 -0.13795 -0.69352
+-0.706728 0.0231359 -0.092296 -0.701057
+0.706728 0.0231359 0.183013 0.683013
+0.703702 0.0693086 0.13795 0.69352
+0.697662 0.115184 0.092296 0.701057
+0.688635 0.160567 0.046247 0.705593
+0.676659 0.205262 -5.2701e-10 0.707107
+0.661785 0.249078 -0.046247 0.705593
+0.644078 0.291828 -0.092296 0.701057
+0.623612 0.333328 -0.13795 0.69352
+0.600477 0.3734 -0.183013 0.683013
+0.574769 0.411874 -0.227292 0.669581
+0.546601 0.448584 -0.270598 0.653281
+0.516092 0.483373 -0.312745 0.634185
+0.483373 0.516092 -0.353553 0.612372
+0.448584 0.546601 -0.392847 0.587938
+0.411874 0.574769 -0.430459 0.560986
+0.3734 0.600477 -0.466228 0.531631
+0.333328 0.623613 -0.5 0.5
+0.291828 0.644078 -0.531631 0.466228
+0.249078 0.661785 -0.560986 0.430459
+0.205262 0.676659 -0.587938 0.392847
+0.160567 0.688635 -0.612372 0.353553
+0.115184 0.697662 -0.634185 0.312745
+0.0693085 0.703702 -0.653282 0.270598
+0.0231358 0.706728 -0.669581 0.227292
+-0.023136 0.706728 -0.683013 0.183013
+-0.0693086 0.703702 -0.69352 0.13795
+-0.115185 0.697662 -0.701057 0.0922959
+-0.160567 0.688635 -0.705593 0.046247
+-0.205262 0.676659 -0.707107 -1.15729e-07
+-0.249078 0.661785 -0.705593 -0.0462471
+-0.291828 0.644078 -0.701057 -0.0922961
+-0.333328 0.623613 -0.69352 -0.13795
+-0.3734 0.600477 -0.683013 -0.183013
+-0.411874 0.574769 -0.669581 -0.227292
+-0.448584 0.546601 -0.653281 -0.270598
+-0.483373 0.516092 -0.634185 -0.312745
+-0.516092 0.483373 -0.612372 -0.353553
+-0.546601 0.448584 -0.587938 -0.392848
+-0.574769 0.411874 -0.560986 -0.430459
+-0.600477 0.3734 -0.531631 -0.466228
+-0.623612 0.333328 -0.5 -0.5
+-0.644078 0.291828 -0.466228 -0.531631
+-0.661785 0.249078 -0.430459 -0.560986
+-0.676659 0.205262 -0.392847 -0.587938
+-0.688635 0.160567 -0.353553 -0.612372
+-0.697662 0.115185 -0.312745 -0.634185
+-0.703702 0.0693086 -0.270598 -0.653281
+-0.706728 0.0231359 -0.227292 -0.669581
+0.735586 0.0240806 0.110281 0.667961
+0.732436 0.0721387 0.0663579 0.673743
+0.72615 0.119888 0.0221509 0.676641
+0.716754 0.167124 -0.0221509 0.676641
+0.704289 0.213644 -0.0663579 0.673743
+0.688808 0.259249 -0.110281 0.667961
+0.670378 0.303744 -0.153731 0.659318
+0.649076 0.346939 -0.196524 0.647852
+0.624996 0.388647 -0.238474 0.633611
+0.598239 0.428692 -0.279404 0.616658
+0.56892 0.466901 -0.319137 0.597064
+0.537165 0.50311 -0.357504 0.574913
+0.50311 0.537165 -0.394339 0.5503
+0.466901 0.56892 -0.429486 0.523331
+0.428692 0.598239 -0.462794 0.49412
+0.388647 0.624996 -0.49412 0.462794
+0.346939 0.649077 -0.523331 0.429486
+0.303744 0.670378 -0.5503 0.394339
+0.259249 0.688808 -0.574913 0.357504
+0.213644 0.704289 -0.597064 0.319137
+0.167124 0.716754 -0.616658 0.279404
+0.119888 0.72615 -0.633611 0.238474
+0.0721386 0.732436 -0.647852 0.196524
+0.0240805 0.735586 -0.659318 0.153731
+-0.0240807 0.735586 -0.667961 0.110281
+-0.0721387 0.732436 -0.673743 0.0663579
+-0.119888 0.72615 -0.676641 0.0221509
+-0.167124 0.716754 -0.676641 -0.022151
+-0.213644 0.704289 -0.673743 -0.066358
+-0.259249 0.688808 -0.667961 -0.110281
+-0.303744 0.670378 -0.659318 -0.153731
+-0.346939 0.649077 -0.647852 -0.196524
+-0.388647 0.624996 -0.633611 -0.238474
+-0.428692 0.598239 -0.616658 -0.279404
+-0.466901 0.56892 -0.597064 -0.319137
+-0.50311 0.537165 -0.574913 -0.357504
+-0.537165 0.50311 -0.5503 -0.394339
+-0.56892 0.466901 -0.523331 -0.429486
+-0.598239 0.428692 -0.49412 -0.462794
+-0.624996 0.388647 -0.462794 -0.49412
+-0.649076 0.346939 -0.429486 -0.523331
+-0.670378 0.303744 -0.394339 -0.5503
+-0.688808 0.259249 -0.357504 -0.574913
+-0.704289 0.213644 -0.319137 -0.597064
+-0.716754 0.167124 -0.279404 -0.616658
+-0.72615 0.119888 -0.238474 -0.633611
+-0.732436 0.0721386 -0.196524 -0.647852
+-0.735586 0.0240807 -0.153731 -0.659318
+0.676641 0.0221509 0.388647 0.624996
+0.673743 0.0663579 0.346939 0.649076
+0.667961 0.110281 0.303744 0.670378
+0.659318 0.153731 0.259249 0.688808
+0.647852 0.196524 0.213644 0.704289
+0.633611 0.238474 0.167124 0.716754
+0.616658 0.279404 0.119888 0.72615
+0.597064 0.319137 0.0721386 0.732436
+0.574913 0.357504 0.0240806 0.735586
+0.5503 0.394339 -0.0240806 0.735586
+0.523331 0.429486 -0.0721386 0.732436
+0.49412 0.462794 -0.119888 0.72615
+0.462794 0.49412 -0.167124 0.716754
+0.429486 0.523331 -0.213644 0.704289
+0.394339 0.5503 -0.259249 0.688808
+0.357504 0.574913 -0.303744 0.670378
+0.319137 0.597064 -0.346939 0.649076
+0.279404 0.616658 -0.388647 0.624996
+0.238474 0.633611 -0.428692 0.598239
+0.196524 0.647852 -0.466901 0.56892
+0.153731 0.659318 -0.50311 0.537165
+0.110281 0.667961 -0.537165 0.50311
+0.0663578 0.673743 -0.56892 0.466901
+0.0221508 0.676641 -0.598239 0.428692
+-0.022151 0.676641 -0.624996 0.388647
+-0.066358 0.673743 -0.649077 0.346939
+-0.110281 0.667961 -0.670378 0.303744
+-0.153731 0.659318 -0.688808 0.259249
+-0.196524 0.647852 -0.704289 0.213644
+-0.238475 0.633611 -0.716754 0.167123
+-0.279404 0.616658 -0.72615 0.119888
+-0.319137 0.597064 -0.732436 0.0721387
+-0.357504 0.574913 -0.735586 0.0240806
+-0.394339 0.5503 -0.735586 -0.0240807
+-0.429486 0.523331 -0.732436 -0.0721386
+-0.462794 0.49412 -0.72615 -0.119888
+-0.49412 0.462794 -0.716754 -0.167124
+-0.523331 0.429486 -0.704289 -0.213644
+-0.5503 0.394339 -0.688808 -0.259249
+-0.574913 0.357504 -0.670378 -0.303744
+-0.597063 0.319137 -0.649077 -0.346939
+-0.616658 0.279404 -0.624996 -0.388647
+-0.633611 0.238474 -0.598239 -0.428692
+-0.647852 0.196524 -0.56892 -0.466901
+-0.659318 0.153731 -0.537165 -0.50311
+-0.667961 0.110281 -0.50311 -0.537165
+-0.673743 0.0663579 -0.466901 -0.56892
+-0.676641 0.022151 -0.428692 -0.598239
+0.706728 0.0231359 0.312745 0.634185
+0.703702 0.0693086 0.270598 0.653281
+0.697662 0.115184 0.227292 0.669581
+0.688635 0.160567 0.183013 0.683013
+0.676659 0.205262 0.13795 0.69352
+0.661785 0.249078 0.092296 0.701057
+0.644078 0.291828 0.046247 0.705593
+0.623612 0.333328 -1.49273e-08 0.707107
+0.600477 0.3734 -0.046247 0.705593
+0.574769 0.411874 -0.092296 0.701057
+0.546601 0.448584 -0.13795 0.69352
+0.516092 0.483373 -0.183013 0.683013
+0.483373 0.516092 -0.227292 0.669581
+0.448584 0.546601 -0.270598 0.653281
+0.411874 0.574769 -0.312745 0.634185
+0.3734 0.600477 -0.353553 0.612372
+0.333328 0.623613 -0.392848 0.587938
+0.291828 0.644078 -0.430459 0.560985
+0.249078 0.661785 -0.466228 0.531631
+0.205262 0.676659 -0.5 0.5
+0.160567 0.688635 -0.531631 0.466228
+0.115184 0.697662 -0.560986 0.430459
+0.0693085 0.703702 -0.587938 0.392847
+0.0231358 0.706728 -0.612372 0.353553
+-0.023136 0.706728 -0.634185 0.312745
+-0.0693086 0.703702 -0.653282 0.270598
+-0.115185 0.697662 -0.669581 0.227292
+-0.160567 0.688635 -0.683013 0.183013
+-0.205262 0.676659 -0.69352 0.13795
+-0.249078 0.661785 -0.701057 0.0922959
+-0.291828 0.644078 -0.705593 0.0462469
+-0.333328 0.623613 -0.707107 8.06046e-08
+-0.3734 0.600477 -0.705593 -0.046247
+-0.411874 0.574769 -0.701057 -0.092296
+-0.448584 0.546601 -0.69352 -0.13795
+-0.483373 0.516092 -0.683013 -0.183013
+-0.516092 0.483373 -0.669581 -0.227292
+-0.546601 0.448584 -0.653281 -0.270598
+-0.574769 0.411874 -0.634185 -0.312745
+-0.600477 0.3734 -0.612372 -0.353553
+-0.623612 0.333328 -0.587938 -0.392847
+-0.644078 0.291828 -0.560986 -0.430459
+-0.661785 0.249078 -0.531631 -0.466228
+-0.676659 0.205262 -0.5 -0.5
+-0.688635 0.160567 -0.466228 -0.531631
+-0.697662 0.115185 -0.430459 -0.560986
+-0.703702 0.0693086 -0.392847 -0.587938
+-0.706728 0.0231359 -0.353553 -0.612372
+0.706728 0.0231359 0.430459 0.560986
+0.703702 0.0693086 0.392847 0.587938
+0.697662 0.115184 0.353553 0.612372
+0.688635 0.160567 0.312745 0.634185
+0.676659 0.205262 0.270598 0.653281
+0.661785 0.249078 0.227292 0.669581
+0.644078 0.291828 0.183013 0.683013
+0.623612 0.333328 0.13795 0.69352
+0.600477 0.3734 0.092296 0.701057
+0.574769 0.411874 0.046247 0.705593
+0.546601 0.448584 1.28193e-08 0.707107
+0.516092 0.483373 -0.046247 0.705593
+0.483373 0.516092 -0.092296 0.701057
+0.448584 0.546601 -0.13795 0.69352
+0.411874 0.574769 -0.183013 0.683013
+0.3734 0.600477 -0.227292 0.669581
+0.333328 0.623613 -0.270598 0.653281
+0.291828 0.644078 -0.312745 0.634185
+0.249078 0.661785 -0.353553 0.612372
+0.205262 0.676659 -0.392848 0.587938
+0.160567 0.688635 -0.430459 0.560986
+0.115184 0.697662 -0.466228 0.531631
+0.0693085 0.703702 -0.5 0.5
+0.0231358 0.706728 -0.531631 0.466228
+-0.023136 0.706728 -0.560986 0.430459
+-0.0693086 0.703702 -0.587938 0.392847
+-0.115185 0.697662 -0.612372 0.353553
+-0.160567 0.688635 -0.634185 0.312745
+-0.205262 0.676659 -0.653282 0.270598
+-0.249078 0.661785 -0.669581 0.227292
+-0.291828 0.644078 -0.683013 0.183013
+-0.333328 0.623613 -0.69352 0.13795
+-0.3734 0.600477 -0.701057 0.092296
+-0.411874 0.574769 -0.705593 0.0462469
+-0.448584 0.546601 -0.707107 2.40575e-08
+-0.483373 0.516092 -0.705593 -0.046247
+-0.516092 0.483373 -0.701057 -0.0922959
+-0.546601 0.448584 -0.69352 -0.13795
+-0.574769 0.411874 -0.683013 -0.183013
+-0.600477 0.3734 -0.669581 -0.227292
+-0.623612 0.333328 -0.653282 -0.270598
+-0.644078 0.291828 -0.634185 -0.312745
+-0.661785 0.249078 -0.612372 -0.353553
+-0.676659 0.205262 -0.587938 -0.392847
+-0.688635 0.160567 -0.560985 -0.430459
+-0.697662 0.115185 -0.531631 -0.466228
+-0.703702 0.0693086 -0.5 -0.5
+-0.706728 0.0231359 -0.466228 -0.531631
+0.735586 0.0240806 0.357504 0.574913
+0.732436 0.0721387 0.319137 0.597064
+0.72615 0.119888 0.279404 0.616658
+0.716754 0.167124 0.238474 0.633611
+0.704289 0.213644 0.196524 0.647852
+0.688808 0.259249 0.153731 0.659318
+0.670378 0.303744 0.110281 0.667961
+0.649076 0.346939 0.0663579 0.673743
+0.624996 0.388647 0.0221509 0.676641
+0.598239 0.428692 -0.0221509 0.676641
+0.56892 0.466901 -0.0663579 0.673743
+0.537165 0.50311 -0.110281 0.667961
+0.50311 0.537165 -0.153731 0.659318
+0.466901 0.56892 -0.196524 0.647852
+0.428692 0.598239 -0.238474 0.633611
+0.388647 0.624996 -0.279404 0.616658
+0.346939 0.649077 -0.319137 0.597063
+0.303744 0.670378 -0.357504 0.574913
+0.259249 0.688808 -0.394339 0.5503
+0.213644 0.704289 -0.429486 0.523331
+0.167124 0.716754 -0.462794 0.49412
+0.119888 0.72615 -0.49412 0.462794
+0.0721386 0.732436 -0.523331 0.429486
+0.0240805 0.735586 -0.5503 0.394339
+-0.0240807 0.735586 -0.574913 0.357504
+-0.0721387 0.732436 -0.597064 0.319137
+-0.119888 0.72615 -0.616658 0.279404
+-0.167124 0.716754 -0.633611 0.238474
+-0.213644 0.704289 -0.647852 0.196524
+-0.259249 0.688808 -0.659318 0.153731
+-0.303744 0.670378 -0.667961 0.110281
+-0.346939 0.649077 -0.673743 0.066358
+-0.388647 0.624996 -0.676641 0.0221509
+-0.428692 0.598239 -0.676641 -0.022151
+-0.466901 0.56892 -0.673743 -0.0663579
+-0.50311 0.537165 -0.667961 -0.110281
+-0.537165 0.50311 -0.659318 -0.153731
+-0.56892 0.466901 -0.647852 -0.196524
+-0.598239 0.428692 -0.633611 -0.238474
+-0.624996 0.388647 -0.616658 -0.279404
+-0.649076 0.346939 -0.597064 -0.319137
+-0.670378 0.303744 -0.574913 -0.357504
+-0.688808 0.259249 -0.5503 -0.394339
+-0.704289 0.213644 -0.523331 -0.429486
+-0.716754 0.167124 -0.49412 -0.462794
+-0.72615 0.119888 -0.462794 -0.49412
+-0.732436 0.0721386 -0.429486 -0.523331
+-0.735586 0.0240807 -0.394339 -0.5503
+0.735586 0.0240806 0.238474 0.633611
+0.732436 0.0721387 0.196524 0.647852
+0.72615 0.119888 0.153731 0.659318
+0.716754 0.167124 0.110281 0.667961
+0.704289 0.213644 0.0663579 0.673743
+0.688808 0.259249 0.0221509 0.676641
+0.670378 0.303744 -0.0221509 0.676641
+0.649076 0.346939 -0.0663579 0.673743
+0.624996 0.388647 -0.110281 0.667961
+0.598239 0.428692 -0.153731 0.659318
+0.56892 0.466901 -0.196524 0.647852
+0.537165 0.50311 -0.238474 0.633611
+0.50311 0.537165 -0.279404 0.616658
+0.466901 0.56892 -0.319137 0.597064
+0.428692 0.598239 -0.357504 0.574913
+0.388647 0.624996 -0.394339 0.5503
+0.346939 0.649077 -0.429486 0.523331
+0.303744 0.670378 -0.462794 0.49412
+0.259249 0.688808 -0.49412 0.462794
+0.213644 0.704289 -0.523331 0.429486
+0.167124 0.716754 -0.5503 0.394339
+0.119888 0.72615 -0.574913 0.357504
+0.0721386 0.732436 -0.597064 0.319137
+0.0240805 0.735586 -0.616658 0.279404
+-0.0240807 0.735586 -0.633611 0.238474
+-0.0721387 0.732436 -0.647852 0.196524
+-0.119888 0.72615 -0.659318 0.153731
+-0.167124 0.716754 -0.667961 0.110281
+-0.213644 0.704289 -0.673743 0.0663578
+-0.259249 0.688808 -0.676641 0.0221508
+-0.303744 0.670378 -0.676641 -0.022151
+-0.346939 0.649077 -0.673743 -0.0663578
+-0.388647 0.624996 -0.667961 -0.110281
+-0.428692 0.598239 -0.659318 -0.153731
+-0.466901 0.56892 -0.647852 -0.196524
+-0.50311 0.537165 -0.633611 -0.238474
+-0.537165 0.50311 -0.616658 -0.279404
+-0.56892 0.466901 -0.597064 -0.319137
+-0.598239 0.428692 -0.574913 -0.357504
+-0.624996 0.388647 -0.5503 -0.394339
+-0.649076 0.346939 -0.523331 -0.429486
+-0.670378 0.303744 -0.49412 -0.462794
+-0.688808 0.259249 -0.462794 -0.49412
+-0.704289 0.213644 -0.429486 -0.523331
+-0.716754 0.167124 -0.394339 -0.5503
+-0.72615 0.119888 -0.357504 -0.574913
+-0.732436 0.0721386 -0.319137 -0.597064
+-0.735586 0.0240807 -0.279404 -0.616658
+0.763354 0.0249896 0.167067 0.623502
+0.760085 0.0748618 0.12593 0.633094
+0.753561 0.124413 0.0842543 0.639975
+0.743811 0.173432 0.0422175 0.644115
+0.730875 0.221709 -4.81092e-10 0.645497
+0.71481 0.269035 -0.0422175 0.644115
+0.695684 0.31521 -0.0842543 0.639975
+0.673578 0.360035 -0.12593 0.633094
+0.648589 0.403318 -0.167067 0.623502
+0.620822 0.444875 -0.207488 0.611241
+0.590396 0.484526 -0.247021 0.596362
+0.557443 0.522102 -0.285496 0.578929
+0.522102 0.557443 -0.322749 0.559017
+0.484526 0.590396 -0.358619 0.536711
+0.444875 0.620822 -0.392954 0.512107
+0.403318 0.648589 -0.425606 0.48531
+0.360035 0.673579 -0.456436 0.456435
+0.31521 0.695684 -0.485311 0.425606
+0.269035 0.71481 -0.512107 0.392954
+0.221709 0.730875 -0.536711 0.358619
+0.173432 0.743811 -0.559017 0.322749
+0.124413 0.753561 -0.578929 0.285496
+0.0748617 0.760085 -0.596362 0.247021
+0.0249895 0.763354 -0.611241 0.207488
+-0.0249897 0.763354 -0.623502 0.167067
+-0.0748619 0.760085 -0.633094 0.12593
+-0.124414 0.753561 -0.639975 0.0842542
+-0.173432 0.743811 -0.644115 0.0422175
+-0.221709 0.730875 -0.645497 -1.05646e-07
+-0.269036 0.71481 -0.644115 -0.0422176
+-0.31521 0.695684 -0.639975 -0.0842544
+-0.360035 0.673579 -0.633094 -0.12593
+-0.403318 0.648589 -0.623502 -0.167067
+-0.444875 0.620822 -0.611241 -0.207488
+-0.484526 0.590397 -0.596362 -0.247021
+-0.522102 0.557443 -0.578929 -0.285496
+-0.557443 0.522102 -0.559017 -0.322749
+-0.590397 0.484526 -0.536711 -0.358619
+-0.620822 0.444875 -0.512107 -0.392954
+-0.648589 0.403318 -0.485311 -0.425606
+-0.673578 0.360035 -0.456436 -0.456435
+-0.695684 0.31521 -0.425606 -0.485311
+-0.71481 0.269035 -0.392954 -0.512107
+-0.730875 0.221709 -0.358619 -0.536711
+-0.743811 0.173432 -0.322749 -0.559017
+-0.753561 0.124414 -0.285496 -0.578929
+-0.760085 0.0748618 -0.247021 -0.596362
+-0.763354 0.0249897 -0.207488 -0.611241
+0.763354 0.0249896 0.285496 0.578929
+0.760085 0.0748618 0.247021 0.596362
+0.753561 0.124413 0.207488 0.611241
+0.743811 0.173432 0.167067 0.623502
+0.730875 0.221709 0.12593 0.633094
+0.71481 0.269035 0.0842543 0.639975
+0.695684 0.31521 0.0422175 0.644115
+0.673578 0.360035 -1.36267e-08 0.645497
+0.648589 0.403318 -0.0422175 0.644115
+0.620822 0.444875 -0.0842543 0.639975
+0.590396 0.484526 -0.12593 0.633094
+0.557443 0.522102 -0.167067 0.623502
+0.522102 0.557443 -0.207488 0.611241
+0.484526 0.590396 -0.247021 0.596362
+0.444875 0.620822 -0.285496 0.578929
+0.403318 0.648589 -0.322749 0.559017
+0.360035 0.673579 -0.358619 0.536711
+0.31521 0.695684 -0.392954 0.512107
+0.269035 0.71481 -0.425606 0.48531
+0.221709 0.730875 -0.456435 0.456435
+0.173432 0.743811 -0.485311 0.425606
+0.124413 0.753561 -0.512107 0.392954
+0.0748617 0.760085 -0.536711 0.358619
+0.0249895 0.763354 -0.559017 0.322749
+-0.0249897 0.763354 -0.578929 0.285496
+-0.0748619 0.760085 -0.596362 0.247021
+-0.124414 0.753561 -0.611241 0.207488
+-0.173432 0.743811 -0.623502 0.167067
+-0.221709 0.730875 -0.633094 0.12593
+-0.269036 0.71481 -0.639975 0.0842542
+-0.31521 0.695684 -0.644115 0.0422174
+-0.360035 0.673579 -0.645497 7.35816e-08
+-0.403318 0.648589 -0.644115 -0.0422175
+-0.444875 0.620822 -0.639975 -0.0842544
+-0.484526 0.590397 -0.633094 -0.12593
+-0.522102 0.557443 -0.623502 -0.167067
+-0.557443 0.522102 -0.611241 -0.207488
+-0.590397 0.484526 -0.596362 -0.247021
+-0.620822 0.444875 -0.578929 -0.285496
+-0.648589 0.403318 -0.559017 -0.322749
+-0.673578 0.360035 -0.536711 -0.358619
+-0.695684 0.31521 -0.512107 -0.392954
+-0.71481 0.269035 -0.48531 -0.425606
+-0.730875 0.221709 -0.456435 -0.456435
+-0.743811 0.173432 -0.425606 -0.485311
+-0.753561 0.124414 -0.392954 -0.512107
+-0.760085 0.0748618 -0.358619 -0.536711
+-0.763354 0.0249897 -0.322749 -0.559017
+0.790146 0.0258667 0.215708 0.573123
+0.786763 0.0774894 0.177762 0.586004
+0.78001 0.12878 0.139055 0.596375
+0.769917 0.17952 0.0997527 0.604193
+0.756528 0.22949 0.060023 0.609424
+0.739899 0.278478 0.0200363 0.612045
+0.720101 0.326274 -0.0200363 0.612045
+0.69722 0.372672 -0.060023 0.609424
+0.671353 0.417474 -0.0997527 0.604193
+0.642612 0.460489 -0.139055 0.596375
+0.611118 0.501532 -0.177762 0.586004
+0.577008 0.540427 -0.215708 0.573123
+0.540427 0.577008 -0.25273 0.557788
+0.501532 0.611118 -0.28867 0.540064
+0.460489 0.642612 -0.323374 0.520028
+0.417474 0.671353 -0.356693 0.497765
+0.372672 0.69722 -0.388485 0.47337
+0.326274 0.720101 -0.418613 0.446949
+0.278478 0.739899 -0.446949 0.418613
+0.22949 0.756528 -0.47337 0.388485
+0.17952 0.769917 -0.497765 0.356693
+0.12878 0.78001 -0.520028 0.323374
+0.0774893 0.786763 -0.540064 0.28867
+0.0258666 0.790146 -0.557788 0.25273
+-0.0258668 0.790146 -0.573123 0.215708
+-0.0774894 0.786763 -0.586004 0.177762
+-0.12878 0.78001 -0.596375 0.139055
+-0.17952 0.769917 -0.604193 0.0997527
+-0.22949 0.756528 -0.609424 0.0600229
+-0.278478 0.739899 -0.612045 0.0200362
+-0.326274 0.720101 -0.612045 -0.0200363
+-0.372672 0.69722 -0.609424 -0.0600229
+-0.417474 0.671353 -0.604193 -0.0997527
+-0.460489 0.642612 -0.596375 -0.139055
+-0.501532 0.611118 -0.586004 -0.177762
+-0.540427 0.577008 -0.573123 -0.215708
+-0.577008 0.540427 -0.557788 -0.25273
+-0.611118 0.501532 -0.540064 -0.28867
+-0.642612 0.460489 -0.520028 -0.323374
+-0.671353 0.417474 -0.497765 -0.356693
+-0.69722 0.372672 -0.47337 -0.388485
+-0.720101 0.326274 -0.446949 -0.418613
+-0.739899 0.278478 -0.418613 -0.446949
+-0.756528 0.22949 -0.388485 -0.47337
+-0.769917 0.179519 -0.356693 -0.497765
+-0.78001 0.12878 -0.323374 -0.520028
+-0.786763 0.0774893 -0.28867 -0.540064
+-0.790146 0.0258668 -0.25273 -0.557788
+0.735586 0.0240806 -0.0221509 0.676641
+0.732436 0.0721387 -0.0663579 0.673743
+0.72615 0.119888 -0.110281 0.667961
+0.716754 0.167124 -0.153731 0.659318
+0.704289 0.213644 -0.196524 0.647852
+0.688808 0.259249 -0.238474 0.633611
+0.670378 0.303744 -0.279404 0.616658
+0.649076 0.346939 -0.319137 0.597064
+0.624996 0.388647 -0.357504 0.574913
+0.598239 0.428692 -0.394339 0.5503
+0.56892 0.466901 -0.429486 0.523331
+0.537165 0.50311 -0.462794 0.49412
+0.50311 0.537165 -0.49412 0.462794
+0.466901 0.56892 -0.523331 0.429486
+0.428692 0.598239 -0.5503 0.394339
+0.388647 0.624996 -0.574913 0.357504
+0.346939 0.649077 -0.597064 0.319137
+0.303744 0.670378 -0.616658 0.279404
+0.259249 0.688808 -0.633611 0.238474
+0.213644 0.704289 -0.647852 0.196524
+0.167124 0.716754 -0.659318 0.153731
+0.119888 0.72615 -0.667961 0.110281
+0.0721386 0.732436 -0.673743 0.0663578
+0.0240805 0.735586 -0.676641 0.0221508
+-0.0240807 0.735586 -0.676641 -0.022151
+-0.0721387 0.732436 -0.673743 -0.066358
+-0.119888 0.72615 -0.667961 -0.110281
+-0.167124 0.716754 -0.659318 -0.153731
+-0.213644 0.704289 -0.647852 -0.196524
+-0.259249 0.688808 -0.633611 -0.238475
+-0.303744 0.670378 -0.616658 -0.279404
+-0.346939 0.649077 -0.597064 -0.319137
+-0.388647 0.624996 -0.574913 -0.357504
+-0.428692 0.598239 -0.5503 -0.394339
+-0.466901 0.56892 -0.523331 -0.429486
+-0.50311 0.537165 -0.49412 -0.462794
+-0.537165 0.50311 -0.462794 -0.49412
+-0.56892 0.466901 -0.429486 -0.523331
+-0.598239 0.428692 -0.394339 -0.5503
+-0.624996 0.388647 -0.357504 -0.574913
+-0.649076 0.346939 -0.319137 -0.597063
+-0.670378 0.303744 -0.279404 -0.616658
+-0.688808 0.259249 -0.238474 -0.633611
+-0.704289 0.213644 -0.196524 -0.647852
+-0.716754 0.167124 -0.153731 -0.659318
+-0.72615 0.119888 -0.110281 -0.667961
+-0.732436 0.0721386 -0.0663579 -0.673743
+-0.735586 0.0240807 -0.022151 -0.676641
+0.763354 0.0249896 -0.0842543 0.639975
+0.760085 0.0748618 -0.12593 0.633094
+0.753561 0.124413 -0.167067 0.623502
+0.743811 0.173432 -0.207488 0.611241
+0.730875 0.221709 -0.247021 0.596362
+0.71481 0.269035 -0.285496 0.578929
+0.695684 0.31521 -0.322749 0.559017
+0.673578 0.360035 -0.358619 0.536711
+0.648589 0.403318 -0.392954 0.512107
+0.620822 0.444875 -0.425606 0.485311
+0.590396 0.484526 -0.456435 0.456435
+0.557443 0.522102 -0.485311 0.425606
+0.522102 0.557443 -0.512107 0.392954
+0.484526 0.590396 -0.536711 0.358619
+0.444875 0.620822 -0.559017 0.322749
+0.403318 0.648589 -0.578929 0.285496
+0.360035 0.673579 -0.596362 0.247021
+0.31521 0.695684 -0.611241 0.207488
+0.269035 0.71481 -0.623502 0.167067
+0.221709 0.730875 -0.633094 0.12593
+0.173432 0.743811 -0.639975 0.0842543
+0.124413 0.753561 -0.644115 0.0422175
+0.0748617 0.760085 -0.645497 -7.93547e-08
+0.0249895 0.763354 -0.644115 -0.0422176
+-0.0249897 0.763354 -0.639975 -0.0842544
+-0.0748619 0.760085 -0.633094 -0.12593
+-0.124414 0.753561 -0.623502 -0.167067
+-0.173432 0.743811 -0.611241 -0.207488
+-0.221709 0.730875 -0.596362 -0.247021
+-0.269036 0.71481 -0.578929 -0.285496
+-0.31521 0.695684 -0.559017 -0.322749
+-0.360035 0.673579 -0.536711 -0.358619
+-0.403318 0.648589 -0.512107 -0.392954
+-0.444875 0.620822 -0.48531 -0.425606
+-0.484526 0.590397 -0.456435 -0.456435
+-0.522102 0.557443 -0.425606 -0.485311
+-0.557443 0.522102 -0.392954 -0.512107
+-0.590397 0.484526 -0.358619 -0.536711
+-0.620822 0.444875 -0.322749 -0.559017
+-0.648589 0.403318 -0.285496 -0.578929
+-0.673578 0.360035 -0.247021 -0.596362
+-0.695684 0.31521 -0.207488 -0.611241
+-0.71481 0.269035 -0.167067 -0.623502
+-0.730875 0.221709 -0.12593 -0.633094
+-0.743811 0.173432 -0.0842542 -0.639975
+-0.753561 0.124414 -0.0422176 -0.644115
+-0.760085 0.0748618 3.0621e-08 -0.645497
+-0.763354 0.0249897 0.0422175 -0.644115
+0.763354 0.0249896 0.0422175 0.644115
+0.760085 0.0748618 -1.76347e-09 0.645497
+0.753561 0.124413 -0.0422175 0.644115
+0.743811 0.173432 -0.0842543 0.639975
+0.730875 0.221709 -0.12593 0.633094
+0.71481 0.269035 -0.167067 0.623502
+0.695684 0.31521 -0.207488 0.611241
+0.673578 0.360035 -0.247021 0.596362
+0.648589 0.403318 -0.285496 0.578929
+0.620822 0.444875 -0.322749 0.559017
+0.590396 0.484526 -0.358619 0.536711
+0.557443 0.522102 -0.392954 0.512107
+0.522102 0.557443 -0.425606 0.485311
+0.484526 0.590396 -0.456435 0.456435
+0.444875 0.620822 -0.485311 0.425606
+0.403318 0.648589 -0.512107 0.392954
+0.360035 0.673579 -0.536711 0.358619
+0.31521 0.695684 -0.559017 0.322749
+0.269035 0.71481 -0.578929 0.285496
+0.221709 0.730875 -0.596362 0.247021
+0.173432 0.743811 -0.611241 0.207488
+0.124413 0.753561 -0.623502 0.167067
+0.0748617 0.760085 -0.633094 0.12593
+0.0249895 0.763354 -0.639975 0.0842542
+-0.0249897 0.763354 -0.644115 0.0422175
+-0.0748619 0.760085 -0.645497 -5.40257e-08
+-0.124414 0.753561 -0.644115 -0.0422176
+-0.173432 0.743811 -0.639975 -0.0842543
+-0.221709 0.730875 -0.633094 -0.12593
+-0.269036 0.71481 -0.623502 -0.167067
+-0.31521 0.695684 -0.611241 -0.207488
+-0.360035 0.673579 -0.596362 -0.247021
+-0.403318 0.648589 -0.578929 -0.285496
+-0.444875 0.620822 -0.559017 -0.322749
+-0.484526 0.590397 -0.536711 -0.358619
+-0.522102 0.557443 -0.512107 -0.392954
+-0.557443 0.522102 -0.485311 -0.425606
+-0.590397 0.484526 -0.456435 -0.456435
+-0.620822 0.444875 -0.425606 -0.48531
+-0.648589 0.403318 -0.392954 -0.512107
+-0.673578 0.360035 -0.358619 -0.536711
+-0.695684 0.31521 -0.322749 -0.559017
+-0.71481 0.269035 -0.285496 -0.578929
+-0.730875 0.221709 -0.247021 -0.596362
+-0.743811 0.173432 -0.207488 -0.611241
+-0.753561 0.124414 -0.167067 -0.623502
+-0.760085 0.0748618 -0.12593 -0.633094
+-0.763354 0.0249897 -0.0842543 -0.639975
+0.790146 0.0258667 -0.0200363 0.612045
+0.786763 0.0774894 -0.060023 0.609424
+0.78001 0.12878 -0.0997527 0.604193
+0.769917 0.17952 -0.139055 0.596375
+0.756528 0.22949 -0.177762 0.586004
+0.739899 0.278478 -0.215708 0.573123
+0.720101 0.326274 -0.25273 0.557788
+0.69722 0.372672 -0.28867 0.540064
+0.671353 0.417474 -0.323374 0.520028
+0.642612 0.460489 -0.356693 0.497765
+0.611118 0.501532 -0.388485 0.47337
+0.577008 0.540427 -0.418613 0.446949
+0.540427 0.577008 -0.446949 0.418613
+0.501532 0.611118 -0.47337 0.388485
+0.460489 0.642612 -0.497765 0.356693
+0.417474 0.671353 -0.520028 0.323374
+0.372672 0.69722 -0.540064 0.28867
+0.326274 0.720101 -0.557788 0.25273
+0.278478 0.739899 -0.573123 0.215708
+0.22949 0.756528 -0.586004 0.177762
+0.17952 0.769917 -0.596375 0.139055
+0.12878 0.78001 -0.604193 0.0997526
+0.0774893 0.786763 -0.609424 0.0600229
+0.0258666 0.790146 -0.612045 0.0200362
+-0.0258668 0.790146 -0.612045 -0.0200363
+-0.0774894 0.786763 -0.609424 -0.060023
+-0.12878 0.78001 -0.604193 -0.0997527
+-0.17952 0.769917 -0.596375 -0.139055
+-0.22949 0.756528 -0.586004 -0.177762
+-0.278478 0.739899 -0.573123 -0.215708
+-0.326274 0.720101 -0.557788 -0.25273
+-0.372672 0.69722 -0.540064 -0.28867
+-0.417474 0.671353 -0.520028 -0.323374
+-0.460489 0.642612 -0.497765 -0.356693
+-0.501532 0.611118 -0.47337 -0.388485
+-0.540427 0.577008 -0.446949 -0.418613
+-0.577008 0.540427 -0.418613 -0.446949
+-0.611118 0.501532 -0.388485 -0.47337
+-0.642612 0.460489 -0.356693 -0.497765
+-0.671353 0.417474 -0.323374 -0.520028
+-0.69722 0.372672 -0.28867 -0.540064
+-0.720101 0.326274 -0.25273 -0.557788
+-0.739899 0.278478 -0.215708 -0.573123
+-0.756528 0.22949 -0.177762 -0.586004
+-0.769917 0.179519 -0.139055 -0.596375
+-0.78001 0.12878 -0.0997527 -0.604193
+-0.786763 0.0774893 -0.060023 -0.609424
+-0.790146 0.0258668 -0.0200363 -0.612045
+0.790146 0.0258667 -0.139055 0.596375
+0.786763 0.0774894 -0.177762 0.586004
+0.78001 0.12878 -0.215708 0.573123
+0.769917 0.17952 -0.25273 0.557788
+0.756528 0.22949 -0.28867 0.540064
+0.739899 0.278478 -0.323374 0.520028
+0.720101 0.326274 -0.356693 0.497765
+0.69722 0.372672 -0.388485 0.47337
+0.671353 0.417474 -0.418613 0.446949
+0.642612 0.460489 -0.446949 0.418613
+0.611118 0.501532 -0.47337 0.388485
+0.577008 0.540427 -0.497765 0.356693
+0.540427 0.577008 -0.520028 0.323374
+0.501532 0.611118 -0.540064 0.28867
+0.460489 0.642612 -0.557788 0.25273
+0.417474 0.671353 -0.573123 0.215708
+0.372672 0.69722 -0.586004 0.177762
+0.326274 0.720101 -0.596375 0.139055
+0.278478 0.739899 -0.604193 0.0997527
+0.22949 0.756528 -0.609424 0.060023
+0.17952 0.769917 -0.612045 0.0200362
+0.12878 0.78001 -0.612045 -0.0200363
+0.0774893 0.786763 -0.609424 -0.0600231
+0.0258666 0.790146 -0.604193 -0.0997528
+-0.0258668 0.790146 -0.596375 -0.139055
+-0.0774894 0.786763 -0.586004 -0.177762
+-0.12878 0.78001 -0.573123 -0.215708
+-0.17952 0.769917 -0.557788 -0.25273
+-0.22949 0.756528 -0.540064 -0.28867
+-0.278478 0.739899 -0.520028 -0.323374
+-0.326274 0.720101 -0.497765 -0.356693
+-0.372672 0.69722 -0.47337 -0.388485
+-0.417474 0.671353 -0.446949 -0.418613
+-0.460489 0.642612 -0.418613 -0.446949
+-0.501532 0.611118 -0.388485 -0.47337
+-0.540427 0.577008 -0.356693 -0.497765
+-0.577008 0.540427 -0.323374 -0.520028
+-0.611118 0.501532 -0.28867 -0.540064
+-0.642612 0.460489 -0.25273 -0.557788
+-0.671353 0.417474 -0.215708 -0.573123
+-0.69722 0.372672 -0.177762 -0.586004
+-0.720101 0.326274 -0.139055 -0.596375
+-0.739899 0.278478 -0.0997526 -0.604193
+-0.756528 0.22949 -0.060023 -0.609424
+-0.769917 0.179519 -0.0200362 -0.612045
+-0.78001 0.12878 0.0200362 -0.612045
+-0.786763 0.0774893 0.060023 -0.609424
+-0.790146 0.0258668 0.0997526 -0.604193
+0.816059 0.026715 -0.185583 0.54671
+0.812565 0.0800307 -0.220942 0.533402
+0.805591 0.133004 -0.255355 0.51781
+0.795167 0.185407 -0.288675 0.5
+0.781339 0.237016 -0.320759 0.480049
+0.764164 0.287611 -0.351469 0.458043
+0.743717 0.336974 -0.380673 0.434075
+0.720086 0.384894 -0.408248 0.408248
+0.693371 0.431166 -0.434075 0.380673
+0.663687 0.475591 -0.458043 0.351469
+0.63116 0.51798 -0.480049 0.320759
+0.595932 0.558151 -0.5 0.288675
+0.558151 0.595932 -0.51781 0.255355
+0.51798 0.63116 -0.533402 0.220942
+0.475591 0.663687 -0.54671 0.185583
+0.431166 0.693371 -0.557678 0.149429
+0.384894 0.720086 -0.566257 0.112635
+0.336974 0.743717 -0.572411 0.0753593
+0.287611 0.764164 -0.576114 0.0377605
+0.237016 0.781339 -0.57735 -2.48065e-08
+0.185407 0.795167 -0.576114 -0.0377605
+0.133003 0.805591 -0.572411 -0.0753594
+0.0800306 0.812565 -0.566257 -0.112636
+0.0267149 0.816059 -0.557678 -0.149429
+-0.0267151 0.816059 -0.54671 -0.185583
+-0.0800307 0.812565 -0.533402 -0.220942
+-0.133004 0.805591 -0.51781 -0.255356
+-0.185407 0.795167 -0.5 -0.288675
+-0.237017 0.781338 -0.480049 -0.320759
+-0.287611 0.764164 -0.458043 -0.351469
+-0.336974 0.743717 -0.434075 -0.380674
+-0.384894 0.720086 -0.408248 -0.408248
+-0.431166 0.693371 -0.380673 -0.434075
+-0.475591 0.663686 -0.351469 -0.458043
+-0.51798 0.63116 -0.320759 -0.480049
+-0.558151 0.595931 -0.288675 -0.5
+-0.595931 0.558151 -0.255356 -0.51781
+-0.63116 0.51798 -0.220942 -0.533402
+-0.663686 0.475591 -0.185583 -0.54671
+-0.693371 0.431166 -0.149429 -0.557678
+-0.720086 0.384894 -0.112636 -0.566257
+-0.743717 0.336974 -0.0753593 -0.572411
+-0.764164 0.287611 -0.0377605 -0.576114
+-0.781339 0.237016 -1.87823e-08 -0.57735
+-0.795167 0.185407 0.0377606 -0.576114
+-0.805591 0.133004 0.0753593 -0.572411
+-0.812565 0.0800306 0.112635 -0.566257
+-0.816059 0.0267151 0.149429 -0.557678
+0.816059 0.026715 -0.0753593 0.572411
+0.812565 0.0800307 -0.112635 0.566257
+0.805591 0.133004 -0.149429 0.557678
+0.795167 0.185407 -0.185583 0.54671
+0.781339 0.237016 -0.220942 0.533402
+0.764164 0.287611 -0.255355 0.51781
+0.743717 0.336974 -0.288675 0.5
+0.720086 0.384894 -0.320759 0.480049
+0.693371 0.431166 -0.351469 0.458043
+0.663687 0.475591 -0.380673 0.434075
+0.63116 0.51798 -0.408248 0.408248
+0.595932 0.558151 -0.434075 0.380673
+0.558151 0.595932 -0.458043 0.351469
+0.51798 0.63116 -0.480049 0.320759
+0.475591 0.663687 -0.5 0.288675
+0.431166 0.693371 -0.51781 0.255355
+0.384894 0.720086 -0.533402 0.220942
+0.336974 0.743717 -0.54671 0.185583
+0.287611 0.764164 -0.557678 0.149429
+0.237016 0.781339 -0.566257 0.112635
+0.185407 0.795167 -0.572411 0.0753593
+0.133003 0.805591 -0.576114 0.0377604
+0.0800306 0.812565 -0.57735 -7.0977e-08
+0.0267149 0.816059 -0.576114 -0.0377606
+-0.0267151 0.816059 -0.572411 -0.0753594
+-0.0800307 0.812565 -0.566257 -0.112635
+-0.133004 0.805591 -0.557678 -0.149429
+-0.185407 0.795167 -0.54671 -0.185583
+-0.237017 0.781338 -0.533402 -0.220942
+-0.287611 0.764164 -0.51781 -0.255356
+-0.336974 0.743717 -0.5 -0.288675
+-0.384894 0.720086 -0.480049 -0.320759
+-0.431166 0.693371 -0.458043 -0.351469
+-0.475591 0.663686 -0.434075 -0.380674
+-0.51798 0.63116 -0.408248 -0.408248
+-0.558151 0.595931 -0.380673 -0.434075
+-0.595931 0.558151 -0.351469 -0.458043
+-0.63116 0.51798 -0.320759 -0.480049
+-0.663686 0.475591 -0.288675 -0.5
+-0.693371 0.431166 -0.255355 -0.51781
+-0.720086 0.384894 -0.220942 -0.533402
+-0.743717 0.336974 -0.185583 -0.54671
+-0.764164 0.287611 -0.149429 -0.557678
+-0.781339 0.237016 -0.112635 -0.566257
+-0.795167 0.185407 -0.0753593 -0.572411
+-0.805591 0.133004 -0.0377605 -0.576114
+-0.812565 0.0800306 2.73883e-08 -0.57735
+-0.816059 0.0267151 0.0377605 -0.576114
+0.841175 0.0275372 -0.122635 0.525954
+0.837573 0.0824937 -0.156772 0.516807
+0.830384 0.137097 -0.190237 0.505447
+0.81964 0.191113 -0.222887 0.491923
+0.805385 0.244311 -0.254583 0.476292
+0.787682 0.296463 -0.285189 0.458622
+0.766606 0.347345 -0.314574 0.438987
+0.742247 0.396739 -0.342612 0.417473
+0.71471 0.444435 -0.369182 0.394172
+0.684112 0.490228 -0.394172 0.369182
+0.650585 0.533921 -0.417473 0.342612
+0.614272 0.575329 -0.438987 0.314574
+0.575329 0.614272 -0.458622 0.285189
+0.533922 0.650585 -0.476292 0.254583
+0.490228 0.684112 -0.491923 0.222887
+0.444435 0.71471 -0.505447 0.190237
+0.396739 0.742247 -0.516807 0.156772
+0.347345 0.766606 -0.525954 0.122635
+0.296463 0.787682 -0.532848 0.0879736
+0.244311 0.805385 -0.537461 0.0529353
+0.191113 0.81964 -0.539773 0.0176703
+0.137097 0.830384 -0.539773 -0.0176704
+0.0824936 0.837573 -0.537461 -0.0529354
+0.0275371 0.841175 -0.532848 -0.0879737
+-0.0275373 0.841175 -0.525954 -0.122635
+-0.0824938 0.837573 -0.516807 -0.156772
+-0.137097 0.830384 -0.505447 -0.190237
+-0.191113 0.81964 -0.491923 -0.222887
+-0.244311 0.805385 -0.476292 -0.254583
+-0.296463 0.787682 -0.458622 -0.285189
+-0.347345 0.766606 -0.438987 -0.314574
+-0.396739 0.742247 -0.417473 -0.342611
+-0.444435 0.71471 -0.394172 -0.369182
+-0.490228 0.684112 -0.369182 -0.394172
+-0.533921 0.650585 -0.342612 -0.417473
+-0.575329 0.614272 -0.314574 -0.438987
+-0.614272 0.575329 -0.285189 -0.458622
+-0.650585 0.533921 -0.254583 -0.476292
+-0.684112 0.490228 -0.222887 -0.491923
+-0.71471 0.444435 -0.190237 -0.505447
+-0.742247 0.39674 -0.156772 -0.516807
+-0.766606 0.347345 -0.122635 -0.525954
+-0.787682 0.296463 -0.0879736 -0.532848
+-0.805385 0.244311 -0.0529353 -0.537461
+-0.81964 0.191113 -0.0176703 -0.539773
+-0.830384 0.137097 0.0176703 -0.539773
+-0.837573 0.0824937 0.0529353 -0.537461
+-0.841175 0.0275373 0.0879736 -0.532848
+0.790146 0.0258667 0.0997527 0.604193
+0.786763 0.0774894 0.060023 0.609424
+0.78001 0.12878 0.0200363 0.612045
+0.769917 0.17952 -0.0200363 0.612045
+0.756528 0.22949 -0.060023 0.609424
+0.739899 0.278478 -0.0997527 0.604193
+0.720101 0.326274 -0.139055 0.596375
+0.69722 0.372672 -0.177762 0.586004
+0.671353 0.417474 -0.215708 0.573123
+0.642612 0.460489 -0.25273 0.557788
+0.611118 0.501532 -0.28867 0.540064
+0.577008 0.540427 -0.323374 0.520028
+0.540427 0.577008 -0.356693 0.497765
+0.501532 0.611118 -0.388485 0.47337
+0.460489 0.642612 -0.418613 0.446949
+0.417474 0.671353 -0.446949 0.418613
+0.372672 0.69722 -0.47337 0.388485
+0.326274 0.720101 -0.497765 0.356693
+0.278478 0.739899 -0.520028 0.323374
+0.22949 0.756528 -0.540064 0.28867
+0.17952 0.769917 -0.557788 0.25273
+0.12878 0.78001 -0.573123 0.215708
+0.0774893 0.786763 -0.586004 0.177762
+0.0258666 0.790146 -0.596375 0.139055
+-0.0258668 0.790146 -0.604193 0.0997526
+-0.0774894 0.786763 -0.609424 0.0600229
+-0.12878 0.78001 -0.612045 0.0200362
+-0.17952 0.769917 -0.612045 -0.0200363
+-0.22949 0.756528 -0.609424 -0.0600231
+-0.278478 0.739899 -0.604193 -0.0997528
+-0.326274 0.720101 -0.596375 -0.139055
+-0.372672 0.69722 -0.586004 -0.177762
+-0.417474 0.671353 -0.573123 -0.215708
+-0.460489 0.642612 -0.557788 -0.25273
+-0.501532 0.611118 -0.540064 -0.28867
+-0.540427 0.577008 -0.520028 -0.323374
+-0.577008 0.540427 -0.497765 -0.356693
+-0.611118 0.501532 -0.47337 -0.388485
+-0.642612 0.460489 -0.446949 -0.418613
+-0.671353 0.417474 -0.418613 -0.446949
+-0.69722 0.372672 -0.388485 -0.47337
+-0.720101 0.326274 -0.356693 -0.497765
+-0.739899 0.278478 -0.323374 -0.520028
+-0.756528 0.22949 -0.28867 -0.540064
+-0.769917 0.179519 -0.25273 -0.557788
+-0.78001 0.12878 -0.215708 -0.573123
+-0.786763 0.0774893 -0.177762 -0.586004
+-0.790146 0.0258668 -0.139055 -0.596375
+0.816059 0.026715 0.0377605 0.576114
+0.812565 0.0800307 -1.5773e-09 0.57735
+0.805591 0.133004 -0.0377605 0.576114
+0.795167 0.185407 -0.0753593 0.572411
+0.781339 0.237016 -0.112635 0.566257
+0.764164 0.287611 -0.149429 0.557678
+0.743717 0.336974 -0.185583 0.54671
+0.720086 0.384894 -0.220942 0.533402
+0.693371 0.431166 -0.255355 0.51781
+0.663687 0.475591 -0.288675 0.5
+0.63116 0.51798 -0.320759 0.480049
+0.595932 0.558151 -0.351469 0.458043
+0.558151 0.595932 -0.380673 0.434075
+0.51798 0.63116 -0.408248 0.408248
+0.475591 0.663687 -0.434075 0.380673
+0.431166 0.693371 -0.458043 0.351469
+0.384894 0.720086 -0.480049 0.320759
+0.336974 0.743717 -0.5 0.288675
+0.287611 0.764164 -0.51781 0.255355
+0.237016 0.781339 -0.533402 0.220942
+0.185407 0.795167 -0.54671 0.185583
+0.133003 0.805591 -0.557678 0.149429
+0.0800306 0.812565 -0.566257 0.112635
+0.0267149 0.816059 -0.572411 0.0753593
+-0.0267151 0.816059 -0.576114 0.0377605
+-0.0800307 0.812565 -0.57735 -4.83221e-08
+-0.133004 0.805591 -0.576114 -0.0377606
+-0.185407 0.795167 -0.572411 -0.0753594
+-0.237017 0.781338 -0.566257 -0.112636
+-0.287611 0.764164 -0.557678 -0.149429
+-0.336974 0.743717 -0.54671 -0.185583
+-0.384894 0.720086 -0.533402 -0.220942
+-0.431166 0.693371 -0.51781 -0.255355
+-0.475591 0.663686 -0.5 -0.288675
+-0.51798 0.63116 -0.480049 -0.320759
+-0.558151 0.595931 -0.458043 -0.351469
+-0.595931 0.558151 -0.434075 -0.380673
+-0.63116 0.51798 -0.408248 -0.408248
+-0.663686 0.475591 -0.380674 -0.434075
+-0.693371 0.431166 -0.351469 -0.458043
+-0.720086 0.384894 -0.320759 -0.480049
+-0.743717 0.336974 -0.288675 -0.5
+-0.764164 0.287611 -0.255355 -0.51781
+-0.781339 0.237016 -0.220942 -0.533402
+-0.795167 0.185407 -0.185583 -0.54671
+-0.805591 0.133004 -0.149429 -0.557678
+-0.812565 0.0800306 -0.112635 -0.566257
+-0.816059 0.0267151 -0.0753594 -0.572411
+0.816059 0.026715 0.149429 0.557678
+0.812565 0.0800307 0.112635 0.566257
+0.805591 0.133004 0.0753593 0.572411
+0.795167 0.185407 0.0377605 0.576114
+0.781339 0.237016 -4.30302e-10 0.57735
+0.764164 0.287611 -0.0377605 0.576114
+0.743717 0.336974 -0.0753593 0.572411
+0.720086 0.384894 -0.112635 0.566257
+0.693371 0.431166 -0.149429 0.557678
+0.663687 0.475591 -0.185583 0.54671
+0.63116 0.51798 -0.220942 0.533402
+0.595932 0.558151 -0.255356 0.51781
+0.558151 0.595932 -0.288675 0.5
+0.51798 0.63116 -0.320759 0.480049
+0.475591 0.663687 -0.351469 0.458043
+0.431166 0.693371 -0.380674 0.434075
+0.384894 0.720086 -0.408248 0.408248
+0.336974 0.743717 -0.434075 0.380673
+0.287611 0.764164 -0.458043 0.351469
+0.237016 0.781339 -0.480049 0.320759
+0.185407 0.795167 -0.5 0.288675
+0.133003 0.805591 -0.51781 0.255355
+0.0800306 0.812565 -0.533402 0.220942
+0.0267149 0.816059 -0.54671 0.185583
+-0.0267151 0.816059 -0.557678 0.149429
+-0.0800307 0.812565 -0.566257 0.112635
+-0.133004 0.805591 -0.572411 0.0753593
+-0.185407 0.795167 -0.576114 0.0377605
+-0.237017 0.781338 -0.57735 -9.44926e-08
+-0.287611 0.764164 -0.576114 -0.0377606
+-0.336974 0.743717 -0.572411 -0.0753594
+-0.384894 0.720086 -0.566257 -0.112635
+-0.431166 0.693371 -0.557678 -0.149429
+-0.475591 0.663686 -0.54671 -0.185583
+-0.51798 0.63116 -0.533402 -0.220942
+-0.558151 0.595931 -0.51781 -0.255356
+-0.595931 0.558151 -0.5 -0.288675
+-0.63116 0.51798 -0.480049 -0.320759
+-0.663686 0.475591 -0.458043 -0.351469
+-0.693371 0.431166 -0.434075 -0.380673
+-0.720086 0.384894 -0.408248 -0.408248
+-0.743717 0.336974 -0.380673 -0.434075
+-0.764164 0.287611 -0.351469 -0.458043
+-0.781339 0.237016 -0.320759 -0.480049
+-0.795167 0.185407 -0.288675 -0.5
+-0.805591 0.133004 -0.255356 -0.51781
+-0.812565 0.0800306 -0.220942 -0.533402
+-0.816059 0.0267151 -0.185583 -0.54671
+0.841175 0.0275372 0.0879736 0.532848
+0.837573 0.0824937 0.0529353 0.537461
+0.830384 0.137097 0.0176703 0.539773
+0.81964 0.191113 -0.0176703 0.539773
+0.805385 0.244311 -0.0529353 0.537461
+0.787682 0.296463 -0.0879736 0.532848
+0.766606 0.347345 -0.122635 0.525954
+0.742247 0.396739 -0.156772 0.516807
+0.71471 0.444435 -0.190237 0.505447
+0.684112 0.490228 -0.222887 0.491923
+0.650585 0.533921 -0.254583 0.476292
+0.614272 0.575329 -0.285189 0.458622
+0.575329 0.614272 -0.314574 0.438987
+0.533922 0.650585 -0.342612 0.417473
+0.490228 0.684112 -0.369182 0.394172
+0.444435 0.71471 -0.394172 0.369182
+0.396739 0.742247 -0.417473 0.342611
+0.347345 0.766606 -0.438987 0.314574
+0.296463 0.787682 -0.458622 0.285189
+0.244311 0.805385 -0.476292 0.254583
+0.191113 0.81964 -0.491923 0.222887
+0.137097 0.830384 -0.505447 0.190237
+0.0824936 0.837573 -0.516807 0.156772
+0.0275371 0.841175 -0.525954 0.122635
+-0.0275373 0.841175 -0.532848 0.0879736
+-0.0824938 0.837573 -0.537461 0.0529353
+-0.137097 0.830384 -0.539773 0.0176703
+-0.191113 0.81964 -0.539773 -0.0176704
+-0.244311 0.805385 -0.537461 -0.0529354
+-0.296463 0.787682 -0.532848 -0.0879737
+-0.347345 0.766606 -0.525954 -0.122635
+-0.396739 0.742247 -0.516807 -0.156772
+-0.444435 0.71471 -0.505447 -0.190237
+-0.490228 0.684112 -0.491923 -0.222887
+-0.533921 0.650585 -0.476292 -0.254583
+-0.575329 0.614272 -0.458622 -0.285189
+-0.614272 0.575329 -0.438987 -0.314574
+-0.650585 0.533921 -0.417473 -0.342612
+-0.684112 0.490228 -0.394172 -0.369182
+-0.71471 0.444435 -0.369182 -0.394172
+-0.742247 0.39674 -0.342612 -0.417473
+-0.766606 0.347345 -0.314574 -0.438987
+-0.787682 0.296463 -0.285189 -0.458622
+-0.805385 0.244311 -0.254583 -0.476292
+-0.81964 0.191113 -0.222887 -0.491923
+-0.830384 0.137097 -0.190237 -0.505447
+-0.837573 0.0824937 -0.156772 -0.516807
+-0.841175 0.0275373 -0.122635 -0.525954
+0.841175 0.0275372 -0.0176703 0.539773
+0.837573 0.0824937 -0.0529353 0.537461
+0.830384 0.137097 -0.0879736 0.532848
+0.81964 0.191113 -0.122635 0.525954
+0.805385 0.244311 -0.156772 0.516807
+0.787682 0.296463 -0.190237 0.505447
+0.766606 0.347345 -0.222887 0.491923
+0.742247 0.396739 -0.254583 0.476292
+0.71471 0.444435 -0.285189 0.458622
+0.684112 0.490228 -0.314574 0.438987
+0.650585 0.533921 -0.342612 0.417473
+0.614272 0.575329 -0.369182 0.394172
+0.575329 0.614272 -0.394172 0.369182
+0.533922 0.650585 -0.417473 0.342612
+0.490228 0.684112 -0.438987 0.314574
+0.444435 0.71471 -0.458622 0.285189
+0.396739 0.742247 -0.476292 0.254583
+0.347345 0.766606 -0.491923 0.222887
+0.296463 0.787682 -0.505447 0.190237
+0.244311 0.805385 -0.516807 0.156772
+0.191113 0.81964 -0.525954 0.122635
+0.137097 0.830384 -0.532848 0.0879735
+0.0824936 0.837573 -0.537461 0.0529352
+0.0275371 0.841175 -0.539773 0.0176703
+-0.0275373 0.841175 -0.539773 -0.0176704
+-0.0824938 0.837573 -0.537461 -0.0529354
+-0.137097 0.830384 -0.532848 -0.0879736
+-0.191113 0.81964 -0.525954 -0.122635
+-0.244311 0.805385 -0.516807 -0.156772
+-0.296463 0.787682 -0.505447 -0.190237
+-0.347345 0.766606 -0.491923 -0.222887
+-0.396739 0.742247 -0.476292 -0.254583
+-0.444435 0.71471 -0.458622 -0.285189
+-0.490228 0.684112 -0.438987 -0.314574
+-0.533921 0.650585 -0.417473 -0.342612
+-0.575329 0.614272 -0.394172 -0.369182
+-0.614272 0.575329 -0.369182 -0.394172
+-0.650585 0.533921 -0.342612 -0.417473
+-0.684112 0.490228 -0.314574 -0.438987
+-0.71471 0.444435 -0.285189 -0.458622
+-0.742247 0.39674 -0.254583 -0.476292
+-0.766606 0.347345 -0.222887 -0.491923
+-0.787682 0.296463 -0.190237 -0.505447
+-0.805385 0.244311 -0.156772 -0.516807
+-0.81964 0.191113 -0.122635 -0.525954
+-0.830384 0.137097 -0.0879736 -0.532848
+-0.837573 0.0824937 -0.0529353 -0.537461
+-0.841175 0.0275373 -0.0176704 -0.539773
+0.865562 0.0283356 -0.0652631 0.495722
+0.861855 0.0848853 -0.0975452 0.490393
+0.854458 0.141072 -0.12941 0.482963
+0.843402 0.196654 -0.16072 0.473465
+0.828735 0.251394 -0.191342 0.46194
+0.810518 0.305057 -0.221144 0.448436
+0.788831 0.357415 -0.25 0.433013
+0.763766 0.408242 -0.277785 0.415735
+0.735431 0.45732 -0.304381 0.396677
+0.703946 0.50444 -0.329673 0.37592
+0.669447 0.549401 -0.353553 0.353553
+0.632081 0.592008 -0.37592 0.329673
+0.592008 0.632081 -0.396677 0.304381
+0.549401 0.669447 -0.415735 0.277785
+0.50444 0.703946 -0.433013 0.25
+0.45732 0.735431 -0.448436 0.221144
+0.408241 0.763766 -0.46194 0.191342
+0.357415 0.788831 -0.473465 0.16072
+0.305057 0.810518 -0.482963 0.129409
+0.251394 0.828735 -0.490393 0.0975451
+0.196654 0.843402 -0.495722 0.0652631
+0.141072 0.854458 -0.498929 0.0327015
+0.0848852 0.861855 -0.5 -6.14679e-08
+0.0283355 0.865562 -0.498929 -0.0327016
+-0.0283356 0.865562 -0.495722 -0.0652631
+-0.0848854 0.861855 -0.490393 -0.0975452
+-0.141072 0.854458 -0.482963 -0.12941
+-0.196654 0.843402 -0.473465 -0.16072
+-0.251394 0.828735 -0.46194 -0.191342
+-0.305058 0.810518 -0.448436 -0.221144
+-0.357415 0.788831 -0.433013 -0.25
+-0.408241 0.763766 -0.415735 -0.277785
+-0.45732 0.735431 -0.396677 -0.304381
+-0.504441 0.703946 -0.37592 -0.329673
+-0.549401 0.669447 -0.353553 -0.353553
+-0.592008 0.632081 -0.329673 -0.37592
+-0.632081 0.592008 -0.304381 -0.396677
+-0.669447 0.549401 -0.277785 -0.415735
+-0.703946 0.504441 -0.25 -0.433013
+-0.735431 0.45732 -0.221144 -0.448436
+-0.763766 0.408242 -0.191342 -0.46194
+-0.788831 0.357415 -0.16072 -0.473465
+-0.810518 0.305057 -0.129409 -0.482963
+-0.828735 0.251394 -0.0975452 -0.490393
+-0.843402 0.196654 -0.0652631 -0.495722
+-0.854458 0.141072 -0.0327016 -0.498929
+-0.861855 0.0848853 2.3719e-08 -0.5
+-0.865562 0.0283356 0.0327015 -0.498929
+0.865562 0.0283356 0.0327016 0.498929
+0.861855 0.0848853 -1.36598e-09 0.5
+0.854458 0.141072 -0.0327016 0.498929
+0.843402 0.196654 -0.0652631 0.495722
+0.828735 0.251394 -0.0975452 0.490393
+0.810518 0.305057 -0.12941 0.482963
+0.788831 0.357415 -0.16072 0.473465
+0.763766 0.408242 -0.191342 0.46194
+0.735431 0.45732 -0.221144 0.448436
+0.703946 0.50444 -0.25 0.433013
+0.669447 0.549401 -0.277785 0.415735
+0.632081 0.592008 -0.304381 0.396677
+0.592008 0.632081 -0.329673 0.37592
+0.549401 0.669447 -0.353553 0.353553
+0.50444 0.703946 -0.37592 0.329673
+0.45732 0.735431 -0.396677 0.304381
+0.408241 0.763766 -0.415735 0.277785
+0.357415 0.788831 -0.433013 0.25
+0.305057 0.810518 -0.448436 0.221144
+0.251394 0.828735 -0.46194 0.191342
+0.196654 0.843402 -0.473465 0.16072
+0.141072 0.854458 -0.482963 0.129409
+0.0848852 0.861855 -0.490393 0.0975451
+0.0283355 0.865562 -0.495722 0.065263
+-0.0283356 0.865562 -0.498929 0.0327015
+-0.0848854 0.861855 -0.5 -4.18481e-08
+-0.141072 0.854458 -0.498929 -0.0327016
+-0.196654 0.843402 -0.495722 -0.0652631
+-0.251394 0.828735 -0.490393 -0.0975452
+-0.305058 0.810518 -0.482963 -0.12941
+-0.357415 0.788831 -0.473465 -0.16072
+-0.408241 0.763766 -0.46194 -0.191342
+-0.45732 0.735431 -0.448436 -0.221144
+-0.504441 0.703946 -0.433013 -0.25
+-0.549401 0.669447 -0.415735 -0.277785
+-0.592008 0.632081 -0.396677 -0.304381
+-0.632081 0.592008 -0.37592 -0.329673
+-0.669447 0.549401 -0.353553 -0.353553
+-0.703946 0.504441 -0.329673 -0.37592
+-0.735431 0.45732 -0.304381 -0.396677
+-0.763766 0.408242 -0.277785 -0.415735
+-0.788831 0.357415 -0.25 -0.433013
+-0.810518 0.305057 -0.221144 -0.448436
+-0.828735 0.251394 -0.191342 -0.46194
+-0.843402 0.196654 -0.16072 -0.473465
+-0.854458 0.141072 -0.12941 -0.482963
+-0.861855 0.0848853 -0.0975451 -0.490393
+-0.865562 0.0283356 -0.0652631 -0.495722
+0.88928 0.029112 -0.0149342 0.456191
+0.885472 0.0872114 -0.0447385 0.454238
+0.877872 0.144937 -0.0743513 0.450339
+0.866513 0.202043 -0.103646 0.444512
+0.851444 0.258283 -0.132496 0.436782
+0.832728 0.313417 -0.160779 0.427181
+0.810447 0.367209 -0.188374 0.415751
+0.784695 0.419428 -0.215162 0.40254
+0.755583 0.469852 -0.241029 0.387606
+0.723236 0.518263 -0.265863 0.371012
+0.687791 0.564456 -0.28956 0.352829
+0.649401 0.608231 -0.312016 0.333136
+0.608231 0.649401 -0.333136 0.312016
+0.564456 0.687791 -0.352829 0.28956
+0.518263 0.723236 -0.371012 0.265863
+0.469852 0.755583 -0.387606 0.241029
+0.419428 0.784695 -0.40254 0.215162
+0.367209 0.810447 -0.415751 0.188374
+0.313417 0.832728 -0.427181 0.160779
+0.258283 0.851444 -0.436782 0.132496
+0.202043 0.866513 -0.444512 0.103646
+0.144937 0.877872 -0.450339 0.0743512
+0.0872113 0.885472 -0.454238 0.0447384
+0.0291119 0.88928 -0.456191 0.0149341
+-0.0291121 0.88928 -0.456191 -0.0149342
+-0.0872115 0.885472 -0.454238 -0.0447385
+-0.144937 0.877872 -0.450339 -0.0743513
+-0.202043 0.866513 -0.444512 -0.103646
+-0.258283 0.851444 -0.436781 -0.132496
+-0.313417 0.832728 -0.427181 -0.160779
+-0.367209 0.810447 -0.415751 -0.188374
+-0.419428 0.784695 -0.40254 -0.215162
+-0.469852 0.755583 -0.387606 -0.241029
+-0.518263 0.723236 -0.371012 -0.265864
+-0.564456 0.687791 -0.352829 -0.28956
+-0.608231 0.649401 -0.333136 -0.312016
+-0.649401 0.608231 -0.312016 -0.333136
+-0.687791 0.564456 -0.28956 -0.352829
+-0.723236 0.518263 -0.265864 -0.371012
+-0.755583 0.469852 -0.241029 -0.387606
+-0.784695 0.419428 -0.215162 -0.40254
+-0.810447 0.367209 -0.188374 -0.415751
+-0.832728 0.313417 -0.160779 -0.427181
+-0.851444 0.258283 -0.132496 -0.436782
+-0.866513 0.202043 -0.103646 -0.444512
+-0.877872 0.144937 -0.0743513 -0.450339
+-0.885472 0.0872113 -0.0447385 -0.454238
+-0.88928 0.0291121 -0.0149342 -0.456191
+0.456191 0.0149342 -0.88928 -0.029112
+0.454238 0.0447385 -0.885472 -0.0872114
+0.450339 0.0743513 -0.877872 -0.144937
+0.444512 0.103646 -0.866513 -0.202043
+0.436782 0.132496 -0.851444 -0.258283
+0.427181 0.160779 -0.832728 -0.313417
+0.415751 0.188374 -0.810447 -0.367209
+0.40254 0.215162 -0.784695 -0.419428
+0.387606 0.241029 -0.755583 -0.469852
+0.371012 0.265863 -0.723236 -0.518263
+0.352829 0.28956 -0.687791 -0.564456
+0.333136 0.312016 -0.649401 -0.608231
+0.312016 0.333136 -0.608231 -0.649401
+0.28956 0.352829 -0.564456 -0.687791
+0.265863 0.371012 -0.518263 -0.723236
+0.241029 0.387606 -0.469852 -0.755583
+0.215162 0.40254 -0.419428 -0.784695
+0.188374 0.415751 -0.367209 -0.810447
+0.160779 0.427181 -0.313417 -0.832728
+0.132496 0.436782 -0.258283 -0.851444
+0.103646 0.444512 -0.202043 -0.866513
+0.0743512 0.450339 -0.144937 -0.877872
+0.0447384 0.454238 -0.0872113 -0.885472
+0.0149341 0.456191 -0.0291119 -0.88928
+-0.0149342 0.456191 0.0291121 -0.88928
+-0.0447385 0.454238 0.0872115 -0.885472
+-0.0743513 0.450339 0.144937 -0.877872
+-0.103646 0.444512 0.202043 -0.866513
+-0.132496 0.436781 0.258283 -0.851444
+-0.160779 0.427181 0.313417 -0.832728
+-0.188374 0.415751 0.367209 -0.810447
+-0.215162 0.40254 0.419428 -0.784695
+-0.241029 0.387606 0.469852 -0.755583
+-0.265864 0.371012 0.518263 -0.723236
+-0.28956 0.352829 0.564456 -0.687791
+-0.312016 0.333136 0.608231 -0.649401
+-0.333136 0.312016 0.649401 -0.608231
+-0.352829 0.28956 0.687791 -0.564456
+-0.371012 0.265864 0.723236 -0.518263
+-0.387606 0.241029 0.755583 -0.469852
+-0.40254 0.215162 0.784695 -0.419428
+-0.415751 0.188374 0.810447 -0.367209
+-0.427181 0.160779 0.832728 -0.313417
+-0.436782 0.132496 0.851444 -0.258283
+-0.444512 0.103646 0.866513 -0.202043
+-0.450339 0.0743513 0.877872 -0.144937
+-0.454238 0.0447385 0.885472 -0.0872113
+-0.456191 0.0149342 0.88928 -0.0291121
+0.499732 0.0163595 -0.858616 -0.113039
+0.497592 0.0490086 -0.849385 -0.168953
+0.493322 0.0814477 -0.836516 -0.224144
+0.486938 0.113538 -0.820066 -0.278375
+0.47847 0.145142 -0.800103 -0.331414
+0.467953 0.176125 -0.776715 -0.383033
+0.455432 0.206354 -0.75 -0.433013
+0.440961 0.235698 -0.720074 -0.481138
+0.424601 0.264034 -0.687064 -0.527203
+0.406423 0.291239 -0.651112 -0.57101
+0.386505 0.317197 -0.612372 -0.612372
+0.364932 0.341796 -0.57101 -0.651112
+0.341796 0.364932 -0.527203 -0.687064
+0.317197 0.386505 -0.481138 -0.720074
+0.291239 0.406423 -0.433013 -0.75
+0.264034 0.424601 -0.383033 -0.776715
+0.235698 0.440961 -0.331414 -0.800103
+0.206353 0.455432 -0.278375 -0.820066
+0.176125 0.467953 -0.224144 -0.836516
+0.145142 0.47847 -0.168953 -0.849385
+0.113538 0.486938 -0.113039 -0.858616
+0.0814477 0.493322 -0.0566407 -0.864171
+0.0490085 0.497592 1.06466e-07 -0.866025
+0.0163595 0.499732 0.0566409 -0.864171
+-0.0163596 0.499732 0.113039 -0.858616
+-0.0490086 0.497592 0.168953 -0.849385
+-0.0814478 0.493322 0.224144 -0.836516
+-0.113538 0.486938 0.278375 -0.820066
+-0.145142 0.47847 0.331414 -0.800103
+-0.176125 0.467953 0.383033 -0.776715
+-0.206354 0.455432 0.433013 -0.75
+-0.235698 0.440961 0.481138 -0.720074
+-0.264034 0.424601 0.527203 -0.687064
+-0.291239 0.406423 0.57101 -0.651112
+-0.317197 0.386505 0.612372 -0.612372
+-0.341796 0.364932 0.651112 -0.57101
+-0.364932 0.341796 0.687064 -0.527203
+-0.386505 0.317197 0.720074 -0.481138
+-0.406423 0.291239 0.75 -0.433013
+-0.424601 0.264034 0.776715 -0.383033
+-0.440961 0.235698 0.800103 -0.331414
+-0.455432 0.206354 0.820066 -0.278375
+-0.467953 0.176125 0.836516 -0.224144
+-0.47847 0.145142 0.849385 -0.168953
+-0.486938 0.113538 0.858616 -0.113039
+-0.493322 0.0814478 0.864171 -0.0566408
+-0.497592 0.0490085 0.866025 4.10824e-08
+-0.499732 0.0163596 0.864171 0.0566407
+0.499732 0.0163595 -0.864171 0.0566408
+0.497592 0.0490086 -0.866025 -2.36595e-09
+0.493322 0.0814477 -0.864171 -0.0566408
+0.486938 0.113538 -0.858616 -0.113039
+0.47847 0.145142 -0.849385 -0.168953
+0.467953 0.176125 -0.836516 -0.224144
+0.455432 0.206354 -0.820066 -0.278375
+0.440961 0.235698 -0.800103 -0.331414
+0.424601 0.264034 -0.776715 -0.383033
+0.406423 0.291239 -0.75 -0.433013
+0.386505 0.317197 -0.720074 -0.481138
+0.364932 0.341796 -0.687064 -0.527203
+0.341796 0.364932 -0.651112 -0.57101
+0.317197 0.386505 -0.612372 -0.612372
+0.291239 0.406423 -0.57101 -0.651112
+0.264034 0.424601 -0.527203 -0.687064
+0.235698 0.440961 -0.481138 -0.720074
+0.206353 0.455432 -0.433013 -0.75
+0.176125 0.467953 -0.383033 -0.776715
+0.145142 0.47847 -0.331414 -0.800103
+0.113538 0.486938 -0.278375 -0.820066
+0.0814477 0.493322 -0.224144 -0.836516
+0.0490085 0.497592 -0.168953 -0.849385
+0.0163595 0.499732 -0.113039 -0.858616
+-0.0163596 0.499732 -0.0566407 -0.864171
+-0.0490086 0.497592 7.24831e-08 -0.866025
+-0.0814478 0.493322 0.0566408 -0.864171
+-0.113538 0.486938 0.113039 -0.858616
+-0.145142 0.47847 0.168953 -0.849385
+-0.176125 0.467953 0.224144 -0.836516
+-0.206354 0.455432 0.278375 -0.820066
+-0.235698 0.440961 0.331413 -0.800103
+-0.264034 0.424601 0.383033 -0.776715
+-0.291239 0.406423 0.433013 -0.75
+-0.317197 0.386505 0.481138 -0.720074
+-0.341796 0.364932 0.527203 -0.687064
+-0.364932 0.341796 0.57101 -0.651112
+-0.386505 0.317197 0.612372 -0.612372
+-0.406423 0.291239 0.651112 -0.57101
+-0.424601 0.264034 0.687064 -0.527203
+-0.440961 0.235698 0.720074 -0.481138
+-0.455432 0.206354 0.75 -0.433013
+-0.467953 0.176125 0.776715 -0.383033
+-0.47847 0.145142 0.800103 -0.331414
+-0.486938 0.113538 0.820066 -0.278375
+-0.493322 0.0814478 0.836516 -0.224144
+-0.497592 0.0490085 0.849385 -0.168953
+-0.499732 0.0163596 0.858616 -0.113039
+0.539773 0.0176703 -0.841175 -0.0275372
+0.537461 0.0529353 -0.837573 -0.0824937
+0.532848 0.0879736 -0.830384 -0.137097
+0.525954 0.122635 -0.81964 -0.191113
+0.516807 0.156772 -0.805385 -0.244311
+0.505447 0.190237 -0.787682 -0.296463
+0.491923 0.222887 -0.766606 -0.347345
+0.476292 0.254583 -0.742247 -0.396739
+0.458622 0.285189 -0.71471 -0.444435
+0.438987 0.314574 -0.684112 -0.490228
+0.417473 0.342612 -0.650585 -0.533921
+0.394172 0.369182 -0.614272 -0.575329
+0.369182 0.394172 -0.575329 -0.614272
+0.342612 0.417473 -0.533922 -0.650585
+0.314574 0.438987 -0.490228 -0.684112
+0.285189 0.458622 -0.444435 -0.71471
+0.254583 0.476292 -0.396739 -0.742247
+0.222887 0.491923 -0.347345 -0.766606
+0.190237 0.505447 -0.296463 -0.787682
+0.156772 0.516807 -0.244311 -0.805385
+0.122635 0.525954 -0.191113 -0.81964
+0.0879735 0.532848 -0.137097 -0.830384
+0.0529352 0.537461 -0.0824936 -0.837573
+0.0176703 0.539773 -0.0275371 -0.841175
+-0.0176704 0.539773 0.0275373 -0.841175
+-0.0529354 0.537461 0.0824938 -0.837573
+-0.0879736 0.532848 0.137097 -0.830384
+-0.122635 0.525954 0.191113 -0.81964
+-0.156772 0.516807 0.244311 -0.805385
+-0.190237 0.505447 0.296463 -0.787682
+-0.222887 0.491923 0.347345 -0.766606
+-0.254583 0.476292 0.396739 -0.742247
+-0.285189 0.458622 0.444435 -0.71471
+-0.314574 0.438987 0.490228 -0.684112
+-0.342612 0.417473 0.533921 -0.650585
+-0.369182 0.394172 0.575329 -0.614272
+-0.394172 0.369182 0.614272 -0.575329
+-0.417473 0.342612 0.650585 -0.533921
+-0.438987 0.314574 0.684112 -0.490228
+-0.458622 0.285189 0.71471 -0.444435
+-0.476292 0.254583 0.742247 -0.39674
+-0.491923 0.222887 0.766606 -0.347345
+-0.505447 0.190237 0.787682 -0.296463
+-0.516807 0.156772 0.805385 -0.244311
+-0.525954 0.122635 0.81964 -0.191113
+-0.532848 0.0879736 0.830384 -0.137097
+-0.537461 0.0529353 0.837573 -0.0824937
+-0.539773 0.0176704 0.841175 -0.0275373
+0.539773 0.0176703 -0.81964 -0.191113
+0.537461 0.0529353 -0.805385 -0.244311
+0.532848 0.0879736 -0.787682 -0.296463
+0.525954 0.122635 -0.766606 -0.347345
+0.516807 0.156772 -0.742247 -0.396739
+0.505447 0.190237 -0.71471 -0.444435
+0.491923 0.222887 -0.684112 -0.490228
+0.476292 0.254583 -0.650585 -0.533922
+0.458622 0.285189 -0.614272 -0.575329
+0.438987 0.314574 -0.575329 -0.614272
+0.417473 0.342612 -0.533922 -0.650585
+0.394172 0.369182 -0.490228 -0.684112
+0.369182 0.394172 -0.444435 -0.71471
+0.342612 0.417473 -0.396739 -0.742247
+0.314574 0.438987 -0.347345 -0.766606
+0.285189 0.458622 -0.296463 -0.787682
+0.254583 0.476292 -0.244311 -0.805385
+0.222887 0.491923 -0.191113 -0.81964
+0.190237 0.505447 -0.137097 -0.830384
+0.156772 0.516807 -0.0824937 -0.837573
+0.122635 0.525954 -0.0275372 -0.841175
+0.0879735 0.532848 0.0275373 -0.841175
+0.0529352 0.537461 0.0824938 -0.837573
+0.0176703 0.539773 0.137097 -0.830384
+-0.0176704 0.539773 0.191113 -0.81964
+-0.0529354 0.537461 0.244311 -0.805385
+-0.0879736 0.532848 0.296463 -0.787682
+-0.122635 0.525954 0.347345 -0.766606
+-0.156772 0.516807 0.39674 -0.742247
+-0.190237 0.505447 0.444435 -0.71471
+-0.222887 0.491923 0.490228 -0.684112
+-0.254583 0.476292 0.533921 -0.650585
+-0.285189 0.458622 0.575329 -0.614272
+-0.314574 0.438987 0.614272 -0.575329
+-0.342612 0.417473 0.650585 -0.533922
+-0.369182 0.394172 0.684112 -0.490228
+-0.394172 0.369182 0.71471 -0.444435
+-0.417473 0.342612 0.742247 -0.396739
+-0.438987 0.314574 0.766606 -0.347345
+-0.458622 0.285189 0.787682 -0.296463
+-0.476292 0.254583 0.805385 -0.244311
+-0.491923 0.222887 0.81964 -0.191113
+-0.505447 0.190237 0.830384 -0.137097
+-0.516807 0.156772 0.837573 -0.0824937
+-0.525954 0.122635 0.841175 -0.0275371
+-0.532848 0.0879736 0.841175 0.0275372
+-0.537461 0.0529353 0.837573 0.0824938
+-0.539773 0.0176704 0.830384 0.137097
+0.577041 0.0188904 -0.773165 -0.262454
+0.57457 0.0565902 -0.754344 -0.31246
+0.569639 0.0940477 -0.732294 -0.361127
+0.562268 0.131103 -0.707107 -0.408248
+0.55249 0.167596 -0.678892 -0.453621
+0.540346 0.203372 -0.64777 -0.497052
+0.525887 0.238277 -0.613875 -0.538354
+0.509177 0.272161 -0.57735 -0.57735
+0.490287 0.30488 -0.538354 -0.613875
+0.469297 0.336294 -0.497052 -0.64777
+0.446298 0.366267 -0.453621 -0.678892
+0.421387 0.394672 -0.408248 -0.707107
+0.394672 0.421387 -0.361127 -0.732294
+0.366267 0.446298 -0.31246 -0.754344
+0.336294 0.469297 -0.262454 -0.773165
+0.30488 0.490287 -0.211325 -0.788675
+0.272161 0.509178 -0.159291 -0.800808
+0.238276 0.525887 -0.106574 -0.809511
+0.203372 0.540346 -0.0534014 -0.814748
+0.167596 0.55249 3.50817e-08 -0.816497
+0.131103 0.562268 0.0534015 -0.814748
+0.0940477 0.569639 0.106574 -0.809511
+0.0565902 0.57457 0.159291 -0.800808
+0.0188903 0.577041 0.211325 -0.788675
+-0.0188904 0.577041 0.262454 -0.773165
+-0.0565903 0.57457 0.31246 -0.754344
+-0.0940478 0.569639 0.361127 -0.732294
+-0.131103 0.562268 0.408248 -0.707107
+-0.167596 0.55249 0.453621 -0.678892
+-0.203372 0.540346 0.497052 -0.64777
+-0.238277 0.525887 0.538354 -0.613875
+-0.272161 0.509178 0.57735 -0.57735
+-0.30488 0.490287 0.613875 -0.538354
+-0.336294 0.469297 0.64777 -0.497052
+-0.366267 0.446298 0.678892 -0.453621
+-0.394672 0.421387 0.707107 -0.408248
+-0.421387 0.394672 0.732294 -0.361127
+-0.446298 0.366267 0.754344 -0.31246
+-0.469297 0.336294 0.773165 -0.262454
+-0.490287 0.30488 0.788675 -0.211325
+-0.509177 0.272161 0.800808 -0.159291
+-0.525887 0.238277 0.809511 -0.106574
+-0.540346 0.203372 0.814748 -0.0534014
+-0.55249 0.167596 0.816497 -2.65621e-08
+-0.562268 0.131103 0.814748 0.0534015
+-0.569639 0.0940478 0.809511 0.106574
+-0.57457 0.0565902 0.800808 0.159291
+-0.577041 0.0188904 0.788675 0.211325
+0.577041 0.0188904 -0.809511 -0.106574
+0.57457 0.0565902 -0.800808 -0.159291
+0.569639 0.0940477 -0.788675 -0.211325
+0.562268 0.131103 -0.773165 -0.262454
+0.55249 0.167596 -0.754344 -0.31246
+0.540346 0.203372 -0.732294 -0.361127
+0.525887 0.238277 -0.707107 -0.408248
+0.509177 0.272161 -0.678892 -0.453621
+0.490287 0.30488 -0.64777 -0.497052
+0.469297 0.336294 -0.613875 -0.538354
+0.446298 0.366267 -0.57735 -0.57735
+0.421387 0.394672 -0.538354 -0.613875
+0.394672 0.421387 -0.497052 -0.64777
+0.366267 0.446298 -0.453621 -0.678892
+0.336294 0.469297 -0.408248 -0.707107
+0.30488 0.490287 -0.361127 -0.732294
+0.272161 0.509178 -0.31246 -0.754345
+0.238276 0.525887 -0.262454 -0.773165
+0.203372 0.540346 -0.211325 -0.788675
+0.167596 0.55249 -0.159291 -0.800808
+0.131103 0.562268 -0.106574 -0.809511
+0.0940477 0.569639 -0.0534013 -0.814748
+0.0565902 0.57457 1.00377e-07 -0.816497
+0.0188903 0.577041 0.0534015 -0.814748
+-0.0188904 0.577041 0.106574 -0.809511
+-0.0565903 0.57457 0.159291 -0.800808
+-0.0940478 0.569639 0.211325 -0.788675
+-0.131103 0.562268 0.262454 -0.773165
+-0.167596 0.55249 0.31246 -0.754344
+-0.203372 0.540346 0.361127 -0.732293
+-0.238277 0.525887 0.408248 -0.707107
+-0.272161 0.509178 0.453621 -0.678892
+-0.30488 0.490287 0.497052 -0.64777
+-0.336294 0.469297 0.538354 -0.613875
+-0.366267 0.446298 0.57735 -0.57735
+-0.394672 0.421387 0.613875 -0.538354
+-0.421387 0.394672 0.64777 -0.497052
+-0.446298 0.366267 0.678892 -0.453621
+-0.469297 0.336294 0.707107 -0.408248
+-0.490287 0.30488 0.732294 -0.361127
+-0.509177 0.272161 0.754344 -0.31246
+-0.525887 0.238277 0.773165 -0.262454
+-0.540346 0.203372 0.788675 -0.211325
+-0.55249 0.167596 0.800808 -0.159291
+-0.562268 0.131103 0.809511 -0.106574
+-0.569639 0.0940478 0.814748 -0.0534015
+-0.57457 0.0565902 0.816497 3.87329e-08
+-0.577041 0.0188904 0.814748 0.0534014
+0.612045 0.0200363 -0.769917 -0.17952
+0.609424 0.060023 -0.756528 -0.22949
+0.604193 0.0997527 -0.739899 -0.278478
+0.596375 0.139055 -0.720101 -0.326274
+0.586004 0.177762 -0.69722 -0.372672
+0.573123 0.215708 -0.671353 -0.417474
+0.557788 0.25273 -0.642612 -0.460489
+0.540064 0.28867 -0.611118 -0.501532
+0.520028 0.323374 -0.577008 -0.540427
+0.497765 0.356693 -0.540427 -0.577008
+0.47337 0.388485 -0.501532 -0.611118
+0.446949 0.418613 -0.460489 -0.642612
+0.418613 0.446949 -0.417474 -0.671353
+0.388485 0.47337 -0.372672 -0.69722
+0.356693 0.497765 -0.326274 -0.720101
+0.323374 0.520028 -0.278478 -0.739899
+0.28867 0.540064 -0.22949 -0.756528
+0.25273 0.557788 -0.179519 -0.769917
+0.215708 0.573123 -0.12878 -0.78001
+0.177762 0.586004 -0.0774893 -0.786763
+0.139055 0.596375 -0.0258667 -0.790146
+0.0997526 0.604193 0.0258668 -0.790146
+0.0600229 0.609424 0.0774895 -0.786763
+0.0200362 0.612045 0.12878 -0.78001
+-0.0200363 0.612045 0.17952 -0.769917
+-0.060023 0.609424 0.22949 -0.756528
+-0.0997527 0.604193 0.278478 -0.739899
+-0.139055 0.596375 0.326274 -0.720101
+-0.177762 0.586004 0.372672 -0.69722
+-0.215708 0.573123 0.417474 -0.671353
+-0.25273 0.557788 0.460489 -0.642612
+-0.28867 0.540064 0.501532 -0.611118
+-0.323374 0.520028 0.540427 -0.577008
+-0.356693 0.497765 0.577008 -0.540427
+-0.388485 0.47337 0.611118 -0.501532
+-0.418613 0.446949 0.642612 -0.460489
+-0.446949 0.418613 0.671353 -0.417474
+-0.47337 0.388485 0.69722 -0.372672
+-0.497765 0.356693 0.720101 -0.326274
+-0.520028 0.323374 0.739899 -0.278478
+-0.540064 0.28867 0.756528 -0.22949
+-0.557788 0.25273 0.769917 -0.17952
+-0.573123 0.215708 0.78001 -0.12878
+-0.586004 0.177762 0.786763 -0.0774894
+-0.596375 0.139055 0.790146 -0.0258666
+-0.604193 0.0997527 0.790146 0.0258667
+-0.609424 0.060023 0.786763 0.0774894
+-0.612045 0.0200363 0.78001 0.12878
+0.539773 0.0176703 -0.830384 0.137097
+0.537461 0.0529353 -0.837573 0.0824937
+0.532848 0.0879736 -0.841175 0.0275372
+0.525954 0.122635 -0.841175 -0.0275372
+0.516807 0.156772 -0.837573 -0.0824937
+0.505447 0.190237 -0.830384 -0.137097
+0.491923 0.222887 -0.81964 -0.191113
+0.476292 0.254583 -0.805385 -0.244311
+0.458622 0.285189 -0.787682 -0.296463
+0.438987 0.314574 -0.766606 -0.347345
+0.417473 0.342612 -0.742247 -0.396739
+0.394172 0.369182 -0.71471 -0.444435
+0.369182 0.394172 -0.684112 -0.490228
+0.342612 0.417473 -0.650585 -0.533922
+0.314574 0.438987 -0.614272 -0.575329
+0.285189 0.458622 -0.575329 -0.614272
+0.254583 0.476292 -0.533921 -0.650585
+0.222887 0.491923 -0.490228 -0.684112
+0.190237 0.505447 -0.444435 -0.71471
+0.156772 0.516807 -0.396739 -0.742247
+0.122635 0.525954 -0.347345 -0.766606
+0.0879735 0.532848 -0.296462 -0.787682
+0.0529352 0.537461 -0.244311 -0.805385
+0.0176703 0.539773 -0.191113 -0.81964
+-0.0176704 0.539773 -0.137097 -0.830384
+-0.0529354 0.537461 -0.0824936 -0.837573
+-0.0879736 0.532848 -0.0275372 -0.841175
+-0.122635 0.525954 0.0275373 -0.841175
+-0.156772 0.516807 0.0824939 -0.837573
+-0.190237 0.505447 0.137097 -0.830384
+-0.222887 0.491923 0.191113 -0.81964
+-0.254583 0.476292 0.244311 -0.805385
+-0.285189 0.458622 0.296463 -0.787682
+-0.314574 0.438987 0.347345 -0.766606
+-0.342612 0.417473 0.396739 -0.742247
+-0.369182 0.394172 0.444435 -0.71471
+-0.394172 0.369182 0.490228 -0.684112
+-0.417473 0.342612 0.533922 -0.650585
+-0.438987 0.314574 0.575329 -0.614272
+-0.458622 0.285189 0.614272 -0.575329
+-0.476292 0.254583 0.650585 -0.533922
+-0.491923 0.222887 0.684112 -0.490228
+-0.505447 0.190237 0.71471 -0.444435
+-0.516807 0.156772 0.742247 -0.396739
+-0.525954 0.122635 0.766606 -0.347345
+-0.532848 0.0879736 0.787682 -0.296463
+-0.537461 0.0529353 0.805385 -0.244311
+-0.539773 0.0176704 0.81964 -0.191113
+0.577041 0.0188904 -0.814748 0.0534014
+0.57457 0.0565902 -0.816497 -2.23064e-09
+0.569639 0.0940477 -0.814748 -0.0534014
+0.562268 0.131103 -0.809511 -0.106574
+0.55249 0.167596 -0.800808 -0.159291
+0.540346 0.203372 -0.788675 -0.211325
+0.525887 0.238277 -0.773165 -0.262454
+0.509177 0.272161 -0.754344 -0.31246
+0.490287 0.30488 -0.732294 -0.361127
+0.469297 0.336294 -0.707107 -0.408248
+0.446298 0.366267 -0.678892 -0.453621
+0.421387 0.394672 -0.64777 -0.497052
+0.394672 0.421387 -0.613875 -0.538354
+0.366267 0.446298 -0.57735 -0.57735
+0.336294 0.469297 -0.538354 -0.613875
+0.30488 0.490287 -0.497052 -0.64777
+0.272161 0.509178 -0.453621 -0.678892
+0.238276 0.525887 -0.408248 -0.707107
+0.203372 0.540346 -0.361127 -0.732294
+0.167596 0.55249 -0.31246 -0.754344
+0.131103 0.562268 -0.262454 -0.773165
+0.0940477 0.569639 -0.211325 -0.788675
+0.0565902 0.57457 -0.15929 -0.800808
+0.0188903 0.577041 -0.106574 -0.809511
+-0.0188904 0.577041 -0.0534014 -0.814748
+-0.0565903 0.57457 6.83377e-08 -0.816497
+-0.0940478 0.569639 0.0534015 -0.814748
+-0.131103 0.562268 0.106574 -0.809511
+-0.167596 0.55249 0.159291 -0.800808
+-0.203372 0.540346 0.211325 -0.788675
+-0.238277 0.525887 0.262454 -0.773165
+-0.272161 0.509178 0.31246 -0.754345
+-0.30488 0.490287 0.361127 -0.732294
+-0.336294 0.469297 0.408248 -0.707107
+-0.366267 0.446298 0.453621 -0.678892
+-0.394672 0.421387 0.497052 -0.64777
+-0.421387 0.394672 0.538354 -0.613875
+-0.446298 0.366267 0.57735 -0.57735
+-0.469297 0.336294 0.613875 -0.538354
+-0.490287 0.30488 0.64777 -0.497052
+-0.509177 0.272161 0.678892 -0.453621
+-0.525887 0.238277 0.707107 -0.408248
+-0.540346 0.203372 0.732294 -0.361127
+-0.55249 0.167596 0.754344 -0.31246
+-0.562268 0.131103 0.773165 -0.262454
+-0.569639 0.0940478 0.788675 -0.211325
+-0.57457 0.0565902 0.800808 -0.159291
+-0.577041 0.0188904 0.809511 -0.106574
+0.577041 0.0188904 -0.788675 0.211325
+0.57457 0.0565902 -0.800808 0.159291
+0.569639 0.0940477 -0.809511 0.106574
+0.562268 0.131103 -0.814748 0.0534014
+0.55249 0.167596 -0.816497 -6.08539e-10
+0.540346 0.203372 -0.814748 -0.0534014
+0.525887 0.238277 -0.809511 -0.106574
+0.509177 0.272161 -0.800808 -0.159291
+0.490287 0.30488 -0.788675 -0.211325
+0.469297 0.336294 -0.773165 -0.262454
+0.446298 0.366267 -0.754344 -0.31246
+0.421387 0.394672 -0.732294 -0.361127
+0.394672 0.421387 -0.707107 -0.408248
+0.366267 0.446298 -0.678892 -0.453621
+0.336294 0.469297 -0.64777 -0.497052
+0.30488 0.490287 -0.613875 -0.538354
+0.272161 0.509178 -0.57735 -0.57735
+0.238276 0.525887 -0.538354 -0.613875
+0.203372 0.540346 -0.497052 -0.64777
+0.167596 0.55249 -0.453621 -0.678892
+0.131103 0.562268 -0.408248 -0.707107
+0.0940477 0.569639 -0.361127 -0.732294
+0.0565902 0.57457 -0.31246 -0.754345
+0.0188903 0.577041 -0.262454 -0.773165
+-0.0188904 0.577041 -0.211325 -0.788675
+-0.0565903 0.57457 -0.159291 -0.800808
+-0.0940478 0.569639 -0.106574 -0.809511
+-0.131103 0.562268 -0.0534014 -0.814748
+-0.167596 0.55249 1.33633e-07 -0.816497
+-0.203372 0.540346 0.0534016 -0.814748
+-0.238277 0.525887 0.106574 -0.809511
+-0.272161 0.509178 0.15929 -0.800808
+-0.30488 0.490287 0.211325 -0.788675
+-0.336294 0.469297 0.262454 -0.773165
+-0.366267 0.446298 0.31246 -0.754344
+-0.394672 0.421387 0.361127 -0.732294
+-0.421387 0.394672 0.408248 -0.707107
+-0.446298 0.366267 0.453621 -0.678892
+-0.469297 0.336294 0.497052 -0.64777
+-0.490287 0.30488 0.538354 -0.613875
+-0.509177 0.272161 0.57735 -0.57735
+-0.525887 0.238277 0.613875 -0.538354
+-0.540346 0.203372 0.64777 -0.497052
+-0.55249 0.167596 0.678892 -0.453621
+-0.562268 0.131103 0.707107 -0.408248
+-0.569639 0.0940478 0.732294 -0.361127
+-0.57457 0.0565902 0.754344 -0.31246
+-0.577041 0.0188904 0.773165 -0.262454
+0.612045 0.0200363 -0.78001 0.12878
+0.609424 0.060023 -0.786763 0.0774894
+0.604193 0.0997527 -0.790146 0.0258667
+0.596375 0.139055 -0.790146 -0.0258667
+0.586004 0.177762 -0.786763 -0.0774894
+0.573123 0.215708 -0.78001 -0.12878
+0.557788 0.25273 -0.769917 -0.17952
+0.540064 0.28867 -0.756528 -0.22949
+0.520028 0.323374 -0.739899 -0.278478
+0.497765 0.356693 -0.720101 -0.326274
+0.47337 0.388485 -0.69722 -0.372672
+0.446949 0.418613 -0.671353 -0.417474
+0.418613 0.446949 -0.642612 -0.460489
+0.388485 0.47337 -0.611118 -0.501532
+0.356693 0.497765 -0.577008 -0.540427
+0.323374 0.520028 -0.540427 -0.577008
+0.28867 0.540064 -0.501532 -0.611118
+0.25273 0.557788 -0.460489 -0.642612
+0.215708 0.573123 -0.417474 -0.671353
+0.177762 0.586004 -0.372672 -0.69722
+0.139055 0.596375 -0.326274 -0.720101
+0.0997526 0.604193 -0.278478 -0.739899
+0.0600229 0.609424 -0.22949 -0.756528
+0.0200362 0.612045 -0.179519 -0.769917
+-0.0200363 0.612045 -0.12878 -0.78001
+-0.060023 0.609424 -0.0774893 -0.786763
+-0.0997527 0.604193 -0.0258667 -0.790146
+-0.139055 0.596375 0.0258668 -0.790146
+-0.177762 0.586004 0.0774895 -0.786763
+-0.215708 0.573123 0.12878 -0.78001
+-0.25273 0.557788 0.17952 -0.769917
+-0.28867 0.540064 0.22949 -0.756528
+-0.323374 0.520028 0.278478 -0.739899
+-0.356693 0.497765 0.326274 -0.720101
+-0.388485 0.47337 0.372672 -0.69722
+-0.418613 0.446949 0.417474 -0.671353
+-0.446949 0.418613 0.460489 -0.642612
+-0.47337 0.388485 0.501532 -0.611118
+-0.497765 0.356693 0.540427 -0.577008
+-0.520028 0.323374 0.577008 -0.540427
+-0.540064 0.28867 0.611118 -0.501532
+-0.557788 0.25273 0.642612 -0.460489
+-0.573123 0.215708 0.671353 -0.417474
+-0.586004 0.177762 0.69722 -0.372672
+-0.596375 0.139055 0.720101 -0.326274
+-0.604193 0.0997527 0.739899 -0.278478
+-0.609424 0.060023 0.756528 -0.22949
+-0.612045 0.0200363 0.769917 -0.17952
+0.612045 0.0200363 -0.790146 -0.0258667
+0.609424 0.060023 -0.786763 -0.0774894
+0.604193 0.0997527 -0.78001 -0.12878
+0.596375 0.139055 -0.769917 -0.17952
+0.586004 0.177762 -0.756528 -0.22949
+0.573123 0.215708 -0.739899 -0.278478
+0.557788 0.25273 -0.720101 -0.326274
+0.540064 0.28867 -0.69722 -0.372672
+0.520028 0.323374 -0.671353 -0.417474
+0.497765 0.356693 -0.642612 -0.460489
+0.47337 0.388485 -0.611118 -0.501532
+0.446949 0.418613 -0.577008 -0.540427
+0.418613 0.446949 -0.540427 -0.577008
+0.388485 0.47337 -0.501532 -0.611118
+0.356693 0.497765 -0.460489 -0.642612
+0.323374 0.520028 -0.417474 -0.671353
+0.28867 0.540064 -0.372672 -0.69722
+0.25273 0.557788 -0.326274 -0.720101
+0.215708 0.573123 -0.278478 -0.739899
+0.177762 0.586004 -0.22949 -0.756528
+0.139055 0.596375 -0.17952 -0.769917
+0.0997526 0.604193 -0.12878 -0.78001
+0.0600229 0.609424 -0.0774893 -0.786763
+0.0200362 0.612045 -0.0258666 -0.790146
+-0.0200363 0.612045 0.0258668 -0.790146
+-0.060023 0.609424 0.0774894 -0.786763
+-0.0997527 0.604193 0.12878 -0.78001
+-0.139055 0.596375 0.17952 -0.769917
+-0.177762 0.586004 0.22949 -0.756528
+-0.215708 0.573123 0.278478 -0.739899
+-0.25273 0.557788 0.326274 -0.720101
+-0.28867 0.540064 0.372672 -0.69722
+-0.323374 0.520028 0.417474 -0.671353
+-0.356693 0.497765 0.460489 -0.642612
+-0.388485 0.47337 0.501532 -0.611118
+-0.418613 0.446949 0.540427 -0.577008
+-0.446949 0.418613 0.577008 -0.540427
+-0.47337 0.388485 0.611118 -0.501532
+-0.497765 0.356693 0.642612 -0.460489
+-0.520028 0.323374 0.671353 -0.417474
+-0.540064 0.28867 0.69722 -0.372672
+-0.557788 0.25273 0.720101 -0.326274
+-0.573123 0.215708 0.739899 -0.278478
+-0.586004 0.177762 0.756528 -0.22949
+-0.596375 0.139055 0.769917 -0.179519
+-0.604193 0.0997527 0.78001 -0.12878
+-0.609424 0.060023 0.786763 -0.0774893
+-0.612045 0.0200363 0.790146 -0.0258668
+0.645152 0.0211201 -0.757229 -0.099691
+0.642389 0.0632698 -0.749087 -0.149003
+0.636876 0.105149 -0.737738 -0.197676
+0.628635 0.146577 -0.72323 -0.245503
+0.617702 0.187378 -0.705625 -0.292279
+0.604125 0.227376 -0.684998 -0.337804
+0.58796 0.266401 -0.661438 -0.381881
+0.569278 0.304285 -0.635045 -0.424324
+0.548158 0.340866 -0.605934 -0.464949
+0.52469 0.375988 -0.574227 -0.503584
+0.498976 0.409499 -0.540062 -0.540062
+0.471125 0.441257 -0.503584 -0.574227
+0.441257 0.471125 -0.464949 -0.605934
+0.409499 0.498976 -0.424324 -0.635045
+0.375988 0.52469 -0.381881 -0.661438
+0.340866 0.548158 -0.337804 -0.684998
+0.304285 0.569278 -0.292279 -0.705625
+0.266401 0.58796 -0.245503 -0.72323
+0.227376 0.604125 -0.197676 -0.737738
+0.187378 0.617702 -0.149003 -0.749087
+0.146577 0.628635 -0.099691 -0.757229
+0.105148 0.636876 -0.0499524 -0.762127
+0.0632697 0.642389 9.38938e-08 -0.763763
+0.02112 0.645152 0.0499525 -0.762127
+-0.0211201 0.645152 0.0996911 -0.757229
+-0.0632698 0.642389 0.149003 -0.749087
+-0.105149 0.636876 0.197676 -0.737738
+-0.146577 0.628635 0.245503 -0.72323
+-0.187378 0.617702 0.292279 -0.705625
+-0.227377 0.604125 0.337804 -0.684998
+-0.266401 0.58796 0.381881 -0.661438
+-0.304285 0.569278 0.424324 -0.635045
+-0.340866 0.548158 0.464949 -0.605934
+-0.375988 0.52469 0.503584 -0.574227
+-0.409499 0.498976 0.540062 -0.540062
+-0.441257 0.471125 0.574227 -0.503584
+-0.471125 0.441257 0.605934 -0.464949
+-0.498976 0.409499 0.635045 -0.424324
+-0.52469 0.375988 0.661438 -0.381881
+-0.548158 0.340866 0.684998 -0.337804
+-0.569278 0.304285 0.705625 -0.292279
+-0.58796 0.266401 0.72323 -0.245503
+-0.604125 0.227376 0.737738 -0.197676
+-0.617702 0.187378 0.749087 -0.149003
+-0.628635 0.146577 0.757229 -0.099691
+-0.636876 0.105149 0.762127 -0.0499525
+-0.642389 0.0632698 0.763763 3.62313e-08
+-0.645152 0.0211201 0.762127 0.0499524
+0.645152 0.0211201 -0.762127 0.0499525
+0.642389 0.0632698 -0.763763 -2.08657e-09
+0.636876 0.105149 -0.762127 -0.0499525
+0.628635 0.146577 -0.757229 -0.099691
+0.617702 0.187378 -0.749087 -0.149003
+0.604125 0.227376 -0.737738 -0.197676
+0.58796 0.266401 -0.72323 -0.245503
+0.569278 0.304285 -0.705625 -0.292279
+0.548158 0.340866 -0.684998 -0.337804
+0.52469 0.375988 -0.661438 -0.381881
+0.498976 0.409499 -0.635045 -0.424324
+0.471125 0.441257 -0.605934 -0.464949
+0.441257 0.471125 -0.574227 -0.503584
+0.409499 0.498976 -0.540062 -0.540062
+0.375988 0.52469 -0.503584 -0.574227
+0.340866 0.548158 -0.464949 -0.605934
+0.304285 0.569278 -0.424324 -0.635045
+0.266401 0.58796 -0.381881 -0.661438
+0.227376 0.604125 -0.337804 -0.684998
+0.187378 0.617702 -0.292279 -0.705625
+0.146577 0.628635 -0.245503 -0.72323
+0.105148 0.636876 -0.197676 -0.737738
+0.0632697 0.642389 -0.149003 -0.749087
+0.02112 0.645152 -0.0996909 -0.757229
+-0.0211201 0.645152 -0.0499524 -0.762127
+-0.0632698 0.642389 6.39241e-08 -0.763763
+-0.105149 0.636876 0.0499525 -0.762127
+-0.146577 0.628635 0.0996911 -0.757229
+-0.187378 0.617702 0.149003 -0.749087
+-0.227377 0.604125 0.197676 -0.737738
+-0.266401 0.58796 0.245504 -0.72323
+-0.304285 0.569278 0.292279 -0.705625
+-0.340866 0.548158 0.337804 -0.684998
+-0.375988 0.52469 0.381881 -0.661438
+-0.409499 0.498976 0.424324 -0.635045
+-0.441257 0.471125 0.464949 -0.605934
+-0.471125 0.441257 0.503584 -0.574227
+-0.498976 0.409499 0.540062 -0.540062
+-0.52469 0.375988 0.574227 -0.503584
+-0.548158 0.340866 0.605934 -0.464949
+-0.569278 0.304285 0.635045 -0.424324
+-0.58796 0.266401 0.661438 -0.381881
+-0.604125 0.227376 0.684998 -0.337803
+-0.617702 0.187378 0.705625 -0.292279
+-0.628635 0.146577 0.72323 -0.245503
+-0.636876 0.105149 0.737738 -0.197676
+-0.642389 0.0632698 0.749087 -0.149003
+-0.645152 0.0211201 0.757229 -0.0996911
+0.676641 0.0221509 -0.735586 -0.0240806
+0.673743 0.0663579 -0.732436 -0.0721387
+0.667961 0.110281 -0.72615 -0.119888
+0.659318 0.153731 -0.716754 -0.167124
+0.647852 0.196524 -0.704289 -0.213644
+0.633611 0.238474 -0.688808 -0.259249
+0.616658 0.279404 -0.670378 -0.303744
+0.597064 0.319137 -0.649076 -0.346939
+0.574913 0.357504 -0.624996 -0.388647
+0.5503 0.394339 -0.598239 -0.428692
+0.523331 0.429486 -0.56892 -0.466901
+0.49412 0.462794 -0.537165 -0.50311
+0.462794 0.49412 -0.50311 -0.537165
+0.429486 0.523331 -0.466901 -0.56892
+0.394339 0.5503 -0.428692 -0.598239
+0.357504 0.574913 -0.388647 -0.624996
+0.319137 0.597064 -0.346939 -0.649077
+0.279404 0.616658 -0.303744 -0.670378
+0.238474 0.633611 -0.259249 -0.688808
+0.196524 0.647852 -0.213644 -0.704289
+0.153731 0.659318 -0.167124 -0.716754
+0.110281 0.667961 -0.119888 -0.72615
+0.0663578 0.673743 -0.0721386 -0.732436
+0.0221508 0.676641 -0.0240805 -0.735586
+-0.022151 0.676641 0.0240807 -0.735586
+-0.066358 0.673743 0.0721387 -0.732436
+-0.110281 0.667961 0.119888 -0.72615
+-0.153731 0.659318 0.167124 -0.716754
+-0.196524 0.647852 0.213644 -0.704289
+-0.238475 0.633611 0.259249 -0.688808
+-0.279404 0.616658 0.303744 -0.670378
+-0.319137 0.597064 0.346939 -0.649077
+-0.357504 0.574913 0.388647 -0.624996
+-0.394339 0.5503 0.428692 -0.598239
+-0.429486 0.523331 0.466901 -0.56892
+-0.462794 0.49412 0.50311 -0.537165
+-0.49412 0.462794 0.537165 -0.50311
+-0.523331 0.429486 0.56892 -0.466901
+-0.5503 0.394339 0.598239 -0.428692
+-0.574913 0.357504 0.624996 -0.388647
+-0.597063 0.319137 0.649076 -0.346939
+-0.616658 0.279404 0.670378 -0.303744
+-0.633611 0.238474 0.688808 -0.259249
+-0.647852 0.196524 0.704289 -0.213644
+-0.659318 0.153731 0.716754 -0.167124
+-0.667961 0.110281 0.72615 -0.119888
+-0.673743 0.0663579 0.732436 -0.0721386
+-0.676641 0.022151 0.735586 -0.0240807
+0.612045 0.0200363 -0.720101 -0.326274
+0.609424 0.060023 -0.69722 -0.372672
+0.604193 0.0997527 -0.671353 -0.417474
+0.596375 0.139055 -0.642612 -0.460489
+0.586004 0.177762 -0.611118 -0.501532
+0.573123 0.215708 -0.577008 -0.540427
+0.557788 0.25273 -0.540427 -0.577008
+0.540064 0.28867 -0.501532 -0.611118
+0.520028 0.323374 -0.460489 -0.642612
+0.497765 0.356693 -0.417474 -0.671353
+0.47337 0.388485 -0.372672 -0.69722
+0.446949 0.418613 -0.326274 -0.720101
+0.418613 0.446949 -0.278478 -0.739899
+0.388485 0.47337 -0.22949 -0.756528
+0.356693 0.497765 -0.17952 -0.769917
+0.323374 0.520028 -0.12878 -0.78001
+0.28867 0.540064 -0.0774893 -0.786763
+0.25273 0.557788 -0.0258667 -0.790146
+0.215708 0.573123 0.0258668 -0.790146
+0.177762 0.586004 0.0774894 -0.786763
+0.139055 0.596375 0.12878 -0.78001
+0.0997526 0.604193 0.17952 -0.769917
+0.0600229 0.609424 0.22949 -0.756528
+0.0200362 0.612045 0.278478 -0.739899
+-0.0200363 0.612045 0.326274 -0.720101
+-0.060023 0.609424 0.372672 -0.69722
+-0.0997527 0.604193 0.417474 -0.671353
+-0.139055 0.596375 0.460489 -0.642612
+-0.177762 0.586004 0.501532 -0.611118
+-0.215708 0.573123 0.540427 -0.577008
+-0.25273 0.557788 0.577008 -0.540427
+-0.28867 0.540064 0.611118 -0.501532
+-0.323374 0.520028 0.642612 -0.460489
+-0.356693 0.497765 0.671353 -0.417474
+-0.388485 0.47337 0.69722 -0.372672
+-0.418613 0.446949 0.720101 -0.326274
+-0.446949 0.418613 0.739899 -0.278478
+-0.47337 0.388485 0.756528 -0.22949
+-0.497765 0.356693 0.769917 -0.17952
+-0.520028 0.323374 0.78001 -0.12878
+-0.540064 0.28867 0.786763 -0.0774894
+-0.557788 0.25273 0.790146 -0.0258667
+-0.573123 0.215708 0.790146 0.0258668
+-0.586004 0.177762 0.786763 0.0774893
+-0.596375 0.139055 0.78001 0.12878
+-0.604193 0.0997527 0.769917 0.17952
+-0.609424 0.060023 0.756528 0.22949
+-0.612045 0.0200363 0.739899 0.278478
+0.645152 0.0211201 -0.661438 -0.381881
+0.642389 0.0632698 -0.635045 -0.424324
+0.636876 0.105149 -0.605934 -0.464949
+0.628635 0.146577 -0.574227 -0.503584
+0.617702 0.187378 -0.540062 -0.540062
+0.604125 0.227376 -0.503584 -0.574227
+0.58796 0.266401 -0.464949 -0.605934
+0.569278 0.304285 -0.424324 -0.635045
+0.548158 0.340866 -0.381881 -0.661438
+0.52469 0.375988 -0.337804 -0.684998
+0.498976 0.409499 -0.292279 -0.705625
+0.471125 0.441257 -0.245503 -0.72323
+0.441257 0.471125 -0.197676 -0.737738
+0.409499 0.498976 -0.149003 -0.749087
+0.375988 0.52469 -0.099691 -0.757229
+0.340866 0.548158 -0.0499524 -0.762127
+0.304285 0.569278 6.27856e-08 -0.763763
+0.266401 0.58796 0.0499525 -0.762127
+0.227376 0.604125 0.0996911 -0.757229
+0.187378 0.617702 0.149003 -0.749087
+0.146577 0.628635 0.197676 -0.737738
+0.105148 0.636876 0.245504 -0.72323
+0.0632697 0.642389 0.292279 -0.705625
+0.02112 0.645152 0.337804 -0.684998
+-0.0211201 0.645152 0.381881 -0.661438
+-0.0632698 0.642389 0.424324 -0.635045
+-0.105149 0.636876 0.464949 -0.605934
+-0.146577 0.628635 0.503584 -0.574227
+-0.187378 0.617702 0.540062 -0.540062
+-0.227377 0.604125 0.574227 -0.503584
+-0.266401 0.58796 0.605934 -0.464949
+-0.304285 0.569278 0.635045 -0.424324
+-0.340866 0.548158 0.661438 -0.381881
+-0.375988 0.52469 0.684998 -0.337803
+-0.409499 0.498976 0.705625 -0.292279
+-0.441257 0.471125 0.72323 -0.245503
+-0.471125 0.441257 0.737738 -0.197676
+-0.498976 0.409499 0.749087 -0.149003
+-0.52469 0.375988 0.757229 -0.0996911
+-0.548158 0.340866 0.762127 -0.0499524
+-0.569278 0.304285 0.763763 -8.59245e-08
+-0.58796 0.266401 0.762127 0.0499525
+-0.604125 0.227376 0.757229 0.0996911
+-0.617702 0.187378 0.749087 0.149003
+-0.628635 0.146577 0.737738 0.197676
+-0.636876 0.105149 0.72323 0.245503
+-0.642389 0.0632698 0.705625 0.292279
+-0.645152 0.0211201 0.684998 0.337804
+0.645152 0.0211201 -0.72323 -0.245503
+0.642389 0.0632698 -0.705625 -0.292279
+0.636876 0.105149 -0.684998 -0.337804
+0.628635 0.146577 -0.661438 -0.381881
+0.617702 0.187378 -0.635045 -0.424324
+0.604125 0.227376 -0.605934 -0.464949
+0.58796 0.266401 -0.574227 -0.503584
+0.569278 0.304285 -0.540062 -0.540062
+0.548158 0.340866 -0.503584 -0.574227
+0.52469 0.375988 -0.464949 -0.605934
+0.498976 0.409499 -0.424324 -0.635045
+0.471125 0.441257 -0.381881 -0.661438
+0.441257 0.471125 -0.337804 -0.684998
+0.409499 0.498976 -0.292279 -0.705625
+0.375988 0.52469 -0.245503 -0.72323
+0.340866 0.548158 -0.197676 -0.737738
+0.304285 0.569278 -0.149003 -0.749087
+0.266401 0.58796 -0.099691 -0.757229
+0.227376 0.604125 -0.0499524 -0.762127
+0.187378 0.617702 3.28159e-08 -0.763763
+0.146577 0.628635 0.0499525 -0.762127
+0.105148 0.636876 0.0996911 -0.757229
+0.0632697 0.642389 0.149003 -0.749087
+0.02112 0.645152 0.197676 -0.737738
+-0.0211201 0.645152 0.245504 -0.72323
+-0.0632698 0.642389 0.292279 -0.705625
+-0.105149 0.636876 0.337804 -0.684998
+-0.146577 0.628635 0.381881 -0.661438
+-0.187378 0.617702 0.424324 -0.635045
+-0.227377 0.604125 0.464949 -0.605934
+-0.266401 0.58796 0.503584 -0.574227
+-0.304285 0.569278 0.540062 -0.540062
+-0.340866 0.548158 0.574227 -0.503584
+-0.375988 0.52469 0.605934 -0.464949
+-0.409499 0.498976 0.635045 -0.424324
+-0.441257 0.471125 0.661438 -0.381881
+-0.471125 0.441257 0.684998 -0.337804
+-0.498976 0.409499 0.705625 -0.292279
+-0.52469 0.375988 0.72323 -0.245504
+-0.548158 0.340866 0.737738 -0.197676
+-0.569278 0.304285 0.749087 -0.149003
+-0.58796 0.266401 0.757229 -0.099691
+-0.604125 0.227376 0.762127 -0.0499524
+-0.617702 0.187378 0.763763 -2.48466e-08
+-0.628635 0.146577 0.762127 0.0499525
+-0.636876 0.105149 0.757229 0.099691
+-0.642389 0.0632698 0.749087 0.149003
+-0.645152 0.0211201 0.737738 0.197676
+0.676641 0.0221509 -0.670378 -0.303744
+0.673743 0.0663579 -0.649076 -0.346939
+0.667961 0.110281 -0.624996 -0.388647
+0.659318 0.153731 -0.598239 -0.428692
+0.647852 0.196524 -0.56892 -0.466901
+0.633611 0.238474 -0.537165 -0.50311
+0.616658 0.279404 -0.50311 -0.537165
+0.597064 0.319137 -0.466901 -0.56892
+0.574913 0.357504 -0.428692 -0.598239
+0.5503 0.394339 -0.388647 -0.624996
+0.523331 0.429486 -0.346939 -0.649076
+0.49412 0.462794 -0.303744 -0.670378
+0.462794 0.49412 -0.259249 -0.688808
+0.429486 0.523331 -0.213644 -0.704289
+0.394339 0.5503 -0.167124 -0.716754
+0.357504 0.574913 -0.119888 -0.72615
+0.319137 0.597064 -0.0721386 -0.732436
+0.279404 0.616658 -0.0240805 -0.735586
+0.238474 0.633611 0.0240806 -0.735586
+0.196524 0.647852 0.0721387 -0.732436
+0.153731 0.659318 0.119888 -0.72615
+0.110281 0.667961 0.167124 -0.716754
+0.0663578 0.673743 0.213644 -0.704289
+0.0221508 0.676641 0.259249 -0.688808
+-0.022151 0.676641 0.303744 -0.670378
+-0.066358 0.673743 0.346939 -0.649076
+-0.110281 0.667961 0.388647 -0.624996
+-0.153731 0.659318 0.428692 -0.598239
+-0.196524 0.647852 0.466901 -0.56892
+-0.238475 0.633611 0.50311 -0.537165
+-0.279404 0.616658 0.537165 -0.50311
+-0.319137 0.597064 0.56892 -0.466901
+-0.357504 0.574913 0.598239 -0.428692
+-0.394339 0.5503 0.624996 -0.388647
+-0.429486 0.523331 0.649076 -0.346939
+-0.462794 0.49412 0.670378 -0.303744
+-0.49412 0.462794 0.688808 -0.259249
+-0.523331 0.429486 0.704289 -0.213644
+-0.5503 0.394339 0.716754 -0.167124
+-0.574913 0.357504 0.72615 -0.119888
+-0.597063 0.319137 0.732436 -0.0721387
+-0.616658 0.279404 0.735586 -0.0240806
+-0.633611 0.238474 0.735586 0.0240807
+-0.647852 0.196524 0.732436 0.0721386
+-0.659318 0.153731 0.72615 0.119888
+-0.667961 0.110281 0.716754 0.167124
+-0.673743 0.0663579 0.704289 0.213644
+-0.676641 0.022151 0.688808 0.259249
+0.676641 0.0221509 -0.598239 -0.428692
+0.673743 0.0663579 -0.56892 -0.466901
+0.667961 0.110281 -0.537165 -0.50311
+0.659318 0.153731 -0.50311 -0.537165
+0.647852 0.196524 -0.466901 -0.56892
+0.633611 0.238474 -0.428692 -0.598239
+0.616658 0.279404 -0.388647 -0.624996
+0.597064 0.319137 -0.346939 -0.649076
+0.574913 0.357504 -0.303744 -0.670378
+0.5503 0.394339 -0.259249 -0.688808
+0.523331 0.429486 -0.213644 -0.704289
+0.49412 0.462794 -0.167124 -0.716754
+0.462794 0.49412 -0.119888 -0.72615
+0.429486 0.523331 -0.0721387 -0.732436
+0.394339 0.5503 -0.0240806 -0.735586
+0.357504 0.574913 0.0240807 -0.735586
+0.319137 0.597064 0.0721387 -0.732436
+0.279404 0.616658 0.119888 -0.72615
+0.238474 0.633611 0.167124 -0.716754
+0.196524 0.647852 0.213644 -0.704289
+0.153731 0.659318 0.259249 -0.688808
+0.110281 0.667961 0.303744 -0.670378
+0.0663578 0.673743 0.346939 -0.649076
+0.0221508 0.676641 0.388647 -0.624996
+-0.022151 0.676641 0.428692 -0.598239
+-0.066358 0.673743 0.466901 -0.56892
+-0.110281 0.667961 0.50311 -0.537165
+-0.153731 0.659318 0.537165 -0.50311
+-0.196524 0.647852 0.56892 -0.466901
+-0.238475 0.633611 0.598239 -0.428692
+-0.279404 0.616658 0.624996 -0.388647
+-0.319137 0.597064 0.649076 -0.346939
+-0.357504 0.574913 0.670378 -0.303744
+-0.394339 0.5503 0.688808 -0.259249
+-0.429486 0.523331 0.704289 -0.213644
+-0.462794 0.49412 0.716754 -0.167124
+-0.49412 0.462794 0.72615 -0.119888
+-0.523331 0.429486 0.732436 -0.0721386
+-0.5503 0.394339 0.735586 -0.0240807
+-0.574913 0.357504 0.735586 0.0240806
+-0.597063 0.319137 0.732436 0.0721386
+-0.616658 0.279404 0.72615 0.119888
+-0.633611 0.238474 0.716754 0.167124
+-0.647852 0.196524 0.704289 0.213644
+-0.659318 0.153731 0.688808 0.259249
+-0.667961 0.110281 0.670378 0.303744
+-0.673743 0.0663579 0.649076 0.346939
+-0.676641 0.022151 0.624996 0.388647
+0.706728 0.0231359 -0.531631 -0.466228
+0.703702 0.0693086 -0.5 -0.5
+0.697662 0.115184 -0.466228 -0.531631
+0.688635 0.160567 -0.430459 -0.560986
+0.676659 0.205262 -0.392847 -0.587938
+0.661785 0.249078 -0.353553 -0.612372
+0.644078 0.291828 -0.312745 -0.634185
+0.623612 0.333328 -0.270598 -0.653281
+0.600477 0.3734 -0.227292 -0.669581
+0.574769 0.411874 -0.183013 -0.683013
+0.546601 0.448584 -0.13795 -0.69352
+0.516092 0.483373 -0.0922959 -0.701057
+0.483373 0.516092 -0.046247 -0.705593
+0.448584 0.546601 1.58103e-09 -0.707107
+0.411874 0.574769 0.046247 -0.705593
+0.3734 0.600477 0.092296 -0.701057
+0.333328 0.623613 0.13795 -0.69352
+0.291828 0.644078 0.183013 -0.683013
+0.249078 0.661785 0.227292 -0.669581
+0.205262 0.676659 0.270598 -0.653281
+0.160567 0.688635 0.312745 -0.634185
+0.115184 0.697662 0.353553 -0.612372
+0.0693085 0.703702 0.392848 -0.587938
+0.0231358 0.706728 0.430459 -0.560985
+-0.023136 0.706728 0.466228 -0.531631
+-0.0693086 0.703702 0.5 -0.5
+-0.115185 0.697662 0.531631 -0.466228
+-0.160567 0.688635 0.560986 -0.430459
+-0.205262 0.676659 0.587938 -0.392847
+-0.249078 0.661785 0.612372 -0.353553
+-0.291828 0.644078 0.634185 -0.312745
+-0.333328 0.623613 0.653281 -0.270598
+-0.3734 0.600477 0.669581 -0.227292
+-0.411874 0.574769 0.683013 -0.183013
+-0.448584 0.546601 0.69352 -0.13795
+-0.483373 0.516092 0.701057 -0.0922959
+-0.516092 0.483373 0.705593 -0.046247
+-0.546601 0.448584 0.707107 3.24897e-08
+-0.574769 0.411874 0.705593 0.0462469
+-0.600477 0.3734 0.701057 0.092296
+-0.623612 0.333328 0.69352 0.13795
+-0.644078 0.291828 0.683013 0.183013
+-0.661785 0.249078 0.669581 0.227292
+-0.676659 0.205262 0.653281 0.270598
+-0.688635 0.160567 0.634185 0.312745
+-0.697662 0.115185 0.612372 0.353553
+-0.703702 0.0693086 0.587938 0.392848
+-0.706728 0.0231359 0.560986 0.430459
+0.706728 0.0231359 -0.612372 -0.353553
+0.703702 0.0693086 -0.587938 -0.392847
+0.697662 0.115184 -0.560986 -0.430459
+0.688635 0.160567 -0.531631 -0.466228
+0.676659 0.205262 -0.5 -0.5
+0.661785 0.249078 -0.466228 -0.531631
+0.644078 0.291828 -0.430459 -0.560986
+0.623612 0.333328 -0.392847 -0.587938
+0.600477 0.3734 -0.353553 -0.612372
+0.574769 0.411874 -0.312745 -0.634185
+0.546601 0.448584 -0.270598 -0.653281
+0.516092 0.483373 -0.227292 -0.669581
+0.483373 0.516092 -0.183013 -0.683013
+0.448584 0.546601 -0.13795 -0.69352
+0.411874 0.574769 -0.0922959 -0.701057
+0.3734 0.600477 -0.0462469 -0.705593
+0.333328 0.623613 5.81282e-08 -0.707107
+0.291828 0.644078 0.046247 -0.705593
+0.249078 0.661785 0.092296 -0.701057
+0.205262 0.676659 0.13795 -0.69352
+0.160567 0.688635 0.183013 -0.683013
+0.115184 0.697662 0.227292 -0.669581
+0.0693085 0.703702 0.270598 -0.653281
+0.0231358 0.706728 0.312745 -0.634185
+-0.023136 0.706728 0.353553 -0.612372
+-0.0693086 0.703702 0.392848 -0.587938
+-0.115185 0.697662 0.430459 -0.560985
+-0.160567 0.688635 0.466228 -0.531631
+-0.205262 0.676659 0.5 -0.5
+-0.249078 0.661785 0.531631 -0.466228
+-0.291828 0.644078 0.560986 -0.430459
+-0.333328 0.623613 0.587938 -0.392848
+-0.3734 0.600477 0.612372 -0.353553
+-0.411874 0.574769 0.634185 -0.312745
+-0.448584 0.546601 0.653281 -0.270598
+-0.483373 0.516092 0.669581 -0.227292
+-0.516092 0.483373 0.683013 -0.183013
+-0.546601 0.448584 0.69352 -0.13795
+-0.574769 0.411874 0.701057 -0.092296
+-0.600477 0.3734 0.705593 -0.046247
+-0.623612 0.333328 0.707107 -7.95506e-08
+-0.644078 0.291828 0.705593 0.046247
+-0.661785 0.249078 0.701057 0.092296
+-0.676659 0.205262 0.69352 0.13795
+-0.688635 0.160567 0.683013 0.183013
+-0.697662 0.115185 0.669581 0.227292
+-0.703702 0.0693086 0.653281 0.270598
+-0.706728 0.0231359 0.634185 0.312745
+0.735586 0.0240806 -0.5503 -0.394339
+0.732436 0.0721387 -0.523331 -0.429486
+0.72615 0.119888 -0.49412 -0.462794
+0.716754 0.167124 -0.462794 -0.49412
+0.704289 0.213644 -0.429486 -0.523331
+0.688808 0.259249 -0.394339 -0.5503
+0.670378 0.303744 -0.357504 -0.574913
+0.649076 0.346939 -0.319137 -0.597064
+0.624996 0.388647 -0.279404 -0.616658
+0.598239 0.428692 -0.238474 -0.633611
+0.56892 0.466901 -0.196524 -0.647852
+0.537165 0.50311 -0.153731 -0.659318
+0.50311 0.537165 -0.110281 -0.667961
+0.466901 0.56892 -0.0663579 -0.673743
+0.428692 0.598239 -0.0221509 -0.676641
+0.388647 0.624996 0.022151 -0.676641
+0.346939 0.649077 0.066358 -0.673743
+0.303744 0.670378 0.110281 -0.667961
+0.259249 0.688808 0.153731 -0.659318
+0.213644 0.704289 0.196524 -0.647852
+0.167124 0.716754 0.238474 -0.633611
+0.119888 0.72615 0.279404 -0.616658
+0.0721386 0.732436 0.319137 -0.597063
+0.0240805 0.735586 0.357504 -0.574913
+-0.0240807 0.735586 0.394339 -0.5503
+-0.0721387 0.732436 0.429486 -0.523331
+-0.119888 0.72615 0.462794 -0.49412
+-0.167124 0.716754 0.49412 -0.462794
+-0.213644 0.704289 0.523331 -0.429486
+-0.259249 0.688808 0.5503 -0.394339
+-0.303744 0.670378 0.574913 -0.357504
+-0.346939 0.649077 0.597063 -0.319137
+-0.388647 0.624996 0.616658 -0.279404
+-0.428692 0.598239 0.633611 -0.238474
+-0.466901 0.56892 0.647852 -0.196524
+-0.50311 0.537165 0.659318 -0.153731
+-0.537165 0.50311 0.667961 -0.110281
+-0.56892 0.466901 0.673743 -0.0663579
+-0.598239 0.428692 0.676641 -0.022151
+-0.624996 0.388647 0.676641 0.0221509
+-0.649076 0.346939 0.673743 0.0663578
+-0.670378 0.303744 0.667961 0.110281
+-0.688808 0.259249 0.659318 0.153731
+-0.704289 0.213644 0.647852 0.196524
+-0.716754 0.167124 0.633611 0.238474
+-0.72615 0.119888 0.616658 0.279404
+-0.732436 0.0721386 0.597064 0.319137
+-0.735586 0.0240807 0.574913 0.357504
+0.676641 0.0221509 -0.716754 -0.167124
+0.673743 0.0663579 -0.704289 -0.213644
+0.667961 0.110281 -0.688808 -0.259249
+0.659318 0.153731 -0.670378 -0.303744
+0.647852 0.196524 -0.649076 -0.346939
+0.633611 0.238474 -0.624996 -0.388647
+0.616658 0.279404 -0.598239 -0.428692
+0.597064 0.319137 -0.56892 -0.466901
+0.574913 0.357504 -0.537165 -0.50311
+0.5503 0.394339 -0.50311 -0.537165
+0.523331 0.429486 -0.466901 -0.56892
+0.49412 0.462794 -0.428692 -0.598239
+0.462794 0.49412 -0.388647 -0.624996
+0.429486 0.523331 -0.346939 -0.649076
+0.394339 0.5503 -0.303744 -0.670378
+0.357504 0.574913 -0.259249 -0.688808
+0.319137 0.597064 -0.213644 -0.704289
+0.279404 0.616658 -0.167124 -0.716754
+0.238474 0.633611 -0.119888 -0.72615
+0.196524 0.647852 -0.0721386 -0.732436
+0.153731 0.659318 -0.0240806 -0.735586
+0.110281 0.667961 0.0240807 -0.735586
+0.0663578 0.673743 0.0721388 -0.732436
+0.0221508 0.676641 0.119888 -0.72615
+-0.022151 0.676641 0.167124 -0.716754
+-0.066358 0.673743 0.213644 -0.704289
+-0.110281 0.667961 0.259249 -0.688808
+-0.153731 0.659318 0.303744 -0.670378
+-0.196524 0.647852 0.346939 -0.649076
+-0.238475 0.633611 0.388648 -0.624996
+-0.279404 0.616658 0.428692 -0.598239
+-0.319137 0.597064 0.466901 -0.56892
+-0.357504 0.574913 0.50311 -0.537165
+-0.394339 0.5503 0.537165 -0.50311
+-0.429486 0.523331 0.56892 -0.466901
+-0.462794 0.49412 0.598239 -0.428692
+-0.49412 0.462794 0.624996 -0.388647
+-0.523331 0.429486 0.649076 -0.346939
+-0.5503 0.394339 0.670378 -0.303744
+-0.574913 0.357504 0.688808 -0.259249
+-0.597063 0.319137 0.704289 -0.213644
+-0.616658 0.279404 0.716754 -0.167124
+-0.633611 0.238474 0.72615 -0.119888
+-0.647852 0.196524 0.732436 -0.0721387
+-0.659318 0.153731 0.735586 -0.0240805
+-0.667961 0.110281 0.735586 0.0240805
+-0.673743 0.0663579 0.732436 0.0721387
+-0.676641 0.022151 0.72615 0.119888
+0.706728 0.0231359 -0.669581 -0.227292
+0.703702 0.0693086 -0.653281 -0.270598
+0.697662 0.115184 -0.634185 -0.312745
+0.688635 0.160567 -0.612372 -0.353553
+0.676659 0.205262 -0.587938 -0.392847
+0.661785 0.249078 -0.560986 -0.430459
+0.644078 0.291828 -0.531631 -0.466228
+0.623612 0.333328 -0.5 -0.5
+0.600477 0.3734 -0.466228 -0.531631
+0.574769 0.411874 -0.430459 -0.560986
+0.546601 0.448584 -0.392847 -0.587938
+0.516092 0.483373 -0.353553 -0.612372
+0.483373 0.516092 -0.312745 -0.634185
+0.448584 0.546601 -0.270598 -0.653281
+0.411874 0.574769 -0.227292 -0.669581
+0.3734 0.600477 -0.183013 -0.683013
+0.333328 0.623613 -0.13795 -0.69352
+0.291828 0.644078 -0.0922959 -0.701057
+0.249078 0.661785 -0.046247 -0.705593
+0.205262 0.676659 3.03816e-08 -0.707107
+0.160567 0.688635 0.046247 -0.705593
+0.115184 0.697662 0.0922961 -0.701057
+0.0693085 0.703702 0.13795 -0.69352
+0.0231358 0.706728 0.183013 -0.683013
+-0.023136 0.706728 0.227292 -0.669581
+-0.0693086 0.703702 0.270598 -0.653281
+-0.115185 0.697662 0.312745 -0.634185
+-0.160567 0.688635 0.353553 -0.612372
+-0.205262 0.676659 0.392848 -0.587938
+-0.249078 0.661785 0.430459 -0.560985
+-0.291828 0.644078 0.466228 -0.531631
+-0.333328 0.623613 0.5 -0.5
+-0.3734 0.600477 0.531631 -0.466228
+-0.411874 0.574769 0.560986 -0.430459
+-0.448584 0.546601 0.587938 -0.392847
+-0.483373 0.516092 0.612372 -0.353553
+-0.516092 0.483373 0.634185 -0.312745
+-0.546601 0.448584 0.653281 -0.270598
+-0.574769 0.411874 0.669581 -0.227292
+-0.600477 0.3734 0.683013 -0.183013
+-0.623612 0.333328 0.69352 -0.13795
+-0.644078 0.291828 0.701057 -0.092296
+-0.661785 0.249078 0.705593 -0.0462469
+-0.676659 0.205262 0.707107 -2.30035e-08
+-0.688635 0.160567 0.705593 0.046247
+-0.697662 0.115185 0.701057 0.0922959
+-0.703702 0.0693086 0.69352 0.13795
+-0.706728 0.0231359 0.683013 0.183013
+0.706728 0.0231359 -0.701057 -0.092296
+0.703702 0.0693086 -0.69352 -0.13795
+0.697662 0.115184 -0.683013 -0.183013
+0.688635 0.160567 -0.669581 -0.227292
+0.676659 0.205262 -0.653281 -0.270598
+0.661785 0.249078 -0.634185 -0.312745
+0.644078 0.291828 -0.612372 -0.353553
+0.623612 0.333328 -0.587938 -0.392847
+0.600477 0.3734 -0.560986 -0.430459
+0.574769 0.411874 -0.531631 -0.466228
+0.546601 0.448584 -0.5 -0.5
+0.516092 0.483373 -0.466228 -0.531631
+0.483373 0.516092 -0.430459 -0.560986
+0.448584 0.546601 -0.392847 -0.587938
+0.411874 0.574769 -0.353553 -0.612372
+0.3734 0.600477 -0.312745 -0.634185
+0.333328 0.623613 -0.270598 -0.653282
+0.291828 0.644078 -0.227292 -0.669581
+0.249078 0.661785 -0.183013 -0.683013
+0.205262 0.676659 -0.13795 -0.69352
+0.160567 0.688635 -0.0922959 -0.701057
+0.115184 0.697662 -0.0462469 -0.705593
+0.0693085 0.703702 8.69287e-08 -0.707107
+0.0231358 0.706728 0.0462471 -0.705593
+-0.023136 0.706728 0.092296 -0.701057
+-0.0693086 0.703702 0.13795 -0.69352
+-0.115185 0.697662 0.183013 -0.683013
+-0.160567 0.688635 0.227292 -0.669581
+-0.205262 0.676659 0.270598 -0.653281
+-0.249078 0.661785 0.312745 -0.634185
+-0.291828 0.644078 0.353553 -0.612372
+-0.333328 0.623613 0.392847 -0.587938
+-0.3734 0.600477 0.430459 -0.560986
+-0.411874 0.574769 0.466228 -0.531631
+-0.448584 0.546601 0.5 -0.5
+-0.483373 0.516092 0.531631 -0.466228
+-0.516092 0.483373 0.560986 -0.430459
+-0.546601 0.448584 0.587938 -0.392847
+-0.574769 0.411874 0.612372 -0.353553
+-0.600477 0.3734 0.634185 -0.312745
+-0.623612 0.333328 0.653281 -0.270598
+-0.644078 0.291828 0.669581 -0.227292
+-0.661785 0.249078 0.683013 -0.183013
+-0.676659 0.205262 0.69352 -0.13795
+-0.688635 0.160567 0.701057 -0.0922959
+-0.697662 0.115185 0.705593 -0.046247
+-0.703702 0.0693086 0.707107 3.35437e-08
+-0.706728 0.0231359 0.705593 0.0462469
+0.735586 0.0240806 -0.659318 -0.153731
+0.732436 0.0721387 -0.647852 -0.196524
+0.72615 0.119888 -0.633611 -0.238474
+0.716754 0.167124 -0.616658 -0.279404
+0.704289 0.213644 -0.597064 -0.319137
+0.688808 0.259249 -0.574913 -0.357504
+0.670378 0.303744 -0.5503 -0.394339
+0.649076 0.346939 -0.523331 -0.429486
+0.624996 0.388647 -0.49412 -0.462794
+0.598239 0.428692 -0.462794 -0.49412
+0.56892 0.466901 -0.429486 -0.523331
+0.537165 0.50311 -0.394339 -0.5503
+0.50311 0.537165 -0.357504 -0.574913
+0.466901 0.56892 -0.319137 -0.597064
+0.428692 0.598239 -0.279404 -0.616658
+0.388647 0.624996 -0.238474 -0.633611
+0.346939 0.649077 -0.196524 -0.647852
+0.303744 0.670378 -0.153731 -0.659318
+0.259249 0.688808 -0.110281 -0.667961
+0.213644 0.704289 -0.0663579 -0.673743
+0.167124 0.716754 -0.0221509 -0.676641
+0.119888 0.72615 0.022151 -0.676641
+0.0721386 0.732436 0.066358 -0.673743
+0.0240805 0.735586 0.110281 -0.667961
+-0.0240807 0.735586 0.153731 -0.659318
+-0.0721387 0.732436 0.196524 -0.647852
+-0.119888 0.72615 0.238474 -0.633611
+-0.167124 0.716754 0.279404 -0.616658
+-0.213644 0.704289 0.319137 -0.597063
+-0.259249 0.688808 0.357504 -0.574913
+-0.303744 0.670378 0.394339 -0.5503
+-0.346939 0.649077 0.429486 -0.523331
+-0.388647 0.624996 0.462794 -0.49412
+-0.428692 0.598239 0.49412 -0.462794
+-0.466901 0.56892 0.523331 -0.429486
+-0.50311 0.537165 0.5503 -0.394339
+-0.537165 0.50311 0.574913 -0.357504
+-0.56892 0.466901 0.597064 -0.319137
+-0.598239 0.428692 0.616658 -0.279404
+-0.624996 0.388647 0.633611 -0.238474
+-0.649076 0.346939 0.647852 -0.196524
+-0.670378 0.303744 0.659318 -0.153731
+-0.688808 0.259249 0.667961 -0.110281
+-0.704289 0.213644 0.673743 -0.0663579
+-0.716754 0.167124 0.676641 -0.0221509
+-0.72615 0.119888 0.676641 0.0221509
+-0.732436 0.0721386 0.673743 0.0663579
+-0.735586 0.0240807 0.667961 0.110281
+0.735586 0.0240806 -0.616658 -0.279404
+0.732436 0.0721387 -0.597064 -0.319137
+0.72615 0.119888 -0.574913 -0.357504
+0.716754 0.167124 -0.5503 -0.394339
+0.704289 0.213644 -0.523331 -0.429486
+0.688808 0.259249 -0.49412 -0.462794
+0.670378 0.303744 -0.462794 -0.49412
+0.649076 0.346939 -0.429486 -0.523331
+0.624996 0.388647 -0.394339 -0.5503
+0.598239 0.428692 -0.357504 -0.574913
+0.56892 0.466901 -0.319137 -0.597064
+0.537165 0.50311 -0.279404 -0.616658
+0.50311 0.537165 -0.238474 -0.633611
+0.466901 0.56892 -0.196524 -0.647852
+0.428692 0.598239 -0.153731 -0.659318
+0.388647 0.624996 -0.110281 -0.667961
+0.346939 0.649077 -0.0663579 -0.673743
+0.303744 0.670378 -0.0221509 -0.676641
+0.259249 0.688808 0.022151 -0.676641
+0.213644 0.704289 0.0663579 -0.673743
+0.167124 0.716754 0.110281 -0.667961
+0.119888 0.72615 0.153731 -0.659318
+0.0721386 0.732436 0.196524 -0.647852
+0.0240805 0.735586 0.238474 -0.633611
+-0.0240807 0.735586 0.279404 -0.616658
+-0.0721387 0.732436 0.319137 -0.597063
+-0.119888 0.72615 0.357504 -0.574913
+-0.167124 0.716754 0.394339 -0.5503
+-0.213644 0.704289 0.429486 -0.52333
+-0.259249 0.688808 0.462794 -0.49412
+-0.303744 0.670378 0.49412 -0.462794
+-0.346939 0.649077 0.523331 -0.429486
+-0.388647 0.624996 0.5503 -0.394339
+-0.428692 0.598239 0.574913 -0.357504
+-0.466901 0.56892 0.597064 -0.319137
+-0.50311 0.537165 0.616658 -0.279404
+-0.537165 0.50311 0.633611 -0.238474
+-0.56892 0.466901 0.647852 -0.196524
+-0.598239 0.428692 0.659318 -0.153731
+-0.624996 0.388647 0.667961 -0.110281
+-0.649076 0.346939 0.673743 -0.066358
+-0.670378 0.303744 0.676641 -0.0221509
+-0.688808 0.259249 0.676641 0.022151
+-0.704289 0.213644 0.673743 0.0663579
+-0.716754 0.167124 0.667961 0.110281
+-0.72615 0.119888 0.659318 0.153731
+-0.732436 0.0721386 0.647852 0.196524
+-0.735586 0.0240807 0.633611 0.238474
+0.763354 0.0249896 -0.559017 -0.322749
+0.760085 0.0748618 -0.536711 -0.358619
+0.753561 0.124413 -0.512107 -0.392954
+0.743811 0.173432 -0.485311 -0.425606
+0.730875 0.221709 -0.456435 -0.456435
+0.71481 0.269035 -0.425606 -0.485311
+0.695684 0.31521 -0.392954 -0.512107
+0.673578 0.360035 -0.358619 -0.536711
+0.648589 0.403318 -0.322749 -0.559017
+0.620822 0.444875 -0.285496 -0.578929
+0.590396 0.484526 -0.247021 -0.596362
+0.557443 0.522102 -0.207488 -0.611241
+0.522102 0.557443 -0.167067 -0.623502
+0.484526 0.590396 -0.12593 -0.633094
+0.444875 0.620822 -0.0842543 -0.639975
+0.403318 0.648589 -0.0422175 -0.644115
+0.360035 0.673579 5.30635e-08 -0.645497
+0.31521 0.695684 0.0422176 -0.644115
+0.269035 0.71481 0.0842543 -0.639975
+0.221709 0.730875 0.12593 -0.633094
+0.173432 0.743811 0.167067 -0.623502
+0.124413 0.753561 0.207488 -0.611241
+0.0748617 0.760085 0.247021 -0.596362
+0.0249895 0.763354 0.285496 -0.578929
+-0.0249897 0.763354 0.322749 -0.559017
+-0.0748619 0.760085 0.358619 -0.536711
+-0.124414 0.753561 0.392954 -0.512107
+-0.173432 0.743811 0.425606 -0.48531
+-0.221709 0.730875 0.456436 -0.456435
+-0.269036 0.71481 0.485311 -0.425606
+-0.31521 0.695684 0.512107 -0.392954
+-0.360035 0.673579 0.536711 -0.358619
+-0.403318 0.648589 0.559017 -0.322749
+-0.444875 0.620822 0.578929 -0.285496
+-0.484526 0.590397 0.596362 -0.247021
+-0.522102 0.557443 0.611241 -0.207488
+-0.557443 0.522102 0.623502 -0.167067
+-0.590397 0.484526 0.633094 -0.12593
+-0.620822 0.444875 0.639975 -0.0842544
+-0.648589 0.403318 0.644115 -0.0422175
+-0.673578 0.360035 0.645497 -7.26194e-08
+-0.695684 0.31521 0.644115 0.0422175
+-0.71481 0.269035 0.639975 0.0842544
+-0.730875 0.221709 0.633094 0.12593
+-0.743811 0.173432 0.623502 0.167067
+-0.753561 0.124414 0.611241 0.207488
+-0.760085 0.0748618 0.596362 0.247021
+-0.763354 0.0249897 0.578929 0.285496
+0.763354 0.0249896 -0.611241 -0.207488
+0.760085 0.0748618 -0.596362 -0.247021
+0.753561 0.124413 -0.578929 -0.285496
+0.743811 0.173432 -0.559017 -0.322749
+0.730875 0.221709 -0.536711 -0.358619
+0.71481 0.269035 -0.512107 -0.392954
+0.695684 0.31521 -0.485311 -0.425606
+0.673578 0.360035 -0.456435 -0.456435
+0.648589 0.403318 -0.425606 -0.485311
+0.620822 0.444875 -0.392954 -0.512107
+0.590396 0.484526 -0.358619 -0.536711
+0.557443 0.522102 -0.322749 -0.559017
+0.522102 0.557443 -0.285496 -0.578929
+0.484526 0.590396 -0.247021 -0.596362
+0.444875 0.620822 -0.207488 -0.611241
+0.403318 0.648589 -0.167067 -0.623502
+0.360035 0.673579 -0.12593 -0.633094
+0.31521 0.695684 -0.0842543 -0.639975
+0.269035 0.71481 -0.0422175 -0.644115
+0.221709 0.730875 2.77345e-08 -0.645497
+0.173432 0.743811 0.0422176 -0.644115
+0.124413 0.753561 0.0842544 -0.639975
+0.0748617 0.760085 0.12593 -0.633094
+0.0249895 0.763354 0.167067 -0.623502
+-0.0249897 0.763354 0.207488 -0.611241
+-0.0748619 0.760085 0.247021 -0.596362
+-0.124414 0.753561 0.285496 -0.578929
+-0.173432 0.743811 0.322749 -0.559017
+-0.221709 0.730875 0.358619 -0.536711
+-0.269036 0.71481 0.392954 -0.512107
+-0.31521 0.695684 0.425606 -0.48531
+-0.360035 0.673579 0.456435 -0.456436
+-0.403318 0.648589 0.485311 -0.425606
+-0.444875 0.620822 0.512107 -0.392954
+-0.484526 0.590397 0.536711 -0.358619
+-0.522102 0.557443 0.559017 -0.322749
+-0.557443 0.522102 0.578929 -0.285496
+-0.590397 0.484526 0.596362 -0.247021
+-0.620822 0.444875 0.611241 -0.207488
+-0.648589 0.403318 0.623502 -0.167067
+-0.673578 0.360035 0.633094 -0.12593
+-0.695684 0.31521 0.639975 -0.0842543
+-0.71481 0.269035 0.644115 -0.0422175
+-0.730875 0.221709 0.645497 -2.09992e-08
+-0.743811 0.173432 0.644115 0.0422176
+-0.753561 0.124414 0.639975 0.0842543
+-0.760085 0.0748618 0.633094 0.12593
+-0.763354 0.0249897 0.623502 0.167067
+0.790146 0.0258667 -0.557788 -0.25273
+0.786763 0.0774894 -0.540064 -0.28867
+0.78001 0.12878 -0.520028 -0.323374
+0.769917 0.17952 -0.497765 -0.356693
+0.756528 0.22949 -0.47337 -0.388485
+0.739899 0.278478 -0.446949 -0.418613
+0.720101 0.326274 -0.418613 -0.446949
+0.69722 0.372672 -0.388485 -0.47337
+0.671353 0.417474 -0.356693 -0.497765
+0.642612 0.460489 -0.323374 -0.520028
+0.611118 0.501532 -0.28867 -0.540064
+0.577008 0.540427 -0.25273 -0.557788
+0.540427 0.577008 -0.215708 -0.573123
+0.501532 0.611118 -0.177762 -0.586004
+0.460489 0.642612 -0.139055 -0.596375
+0.417474 0.671353 -0.0997526 -0.604193
+0.372672 0.69722 -0.0600229 -0.609424
+0.326274 0.720101 -0.0200362 -0.612045
+0.278478 0.739899 0.0200363 -0.612045
+0.22949 0.756528 0.060023 -0.609424
+0.17952 0.769917 0.0997527 -0.604193
+0.12878 0.78001 0.139055 -0.596375
+0.0774893 0.786763 0.177762 -0.586004
+0.0258666 0.790146 0.215708 -0.573123
+-0.0258668 0.790146 0.25273 -0.557788
+-0.0774894 0.786763 0.28867 -0.540064
+-0.12878 0.78001 0.323374 -0.520028
+-0.17952 0.769917 0.356693 -0.497765
+-0.22949 0.756528 0.388485 -0.47337
+-0.278478 0.739899 0.418613 -0.446949
+-0.326274 0.720101 0.446949 -0.418613
+-0.372672 0.69722 0.47337 -0.388485
+-0.417474 0.671353 0.497765 -0.356693
+-0.460489 0.642612 0.520028 -0.323374
+-0.501532 0.611118 0.540064 -0.28867
+-0.540427 0.577008 0.557788 -0.25273
+-0.577008 0.540427 0.573123 -0.215708
+-0.611118 0.501532 0.586004 -0.177762
+-0.642612 0.460489 0.596375 -0.139055
+-0.671353 0.417474 0.604193 -0.0997527
+-0.69722 0.372672 0.609424 -0.0600231
+-0.720101 0.326274 0.612045 -0.0200363
+-0.739899 0.278478 0.612045 0.0200363
+-0.756528 0.22949 0.609424 0.060023
+-0.769917 0.179519 0.604193 0.0997527
+-0.78001 0.12878 0.596375 0.139055
+-0.786763 0.0774893 0.586004 0.177762
+-0.790146 0.0258668 0.573123 0.215708
+0.612045 0.0200363 -0.739899 0.278478
+0.609424 0.060023 -0.756528 0.22949
+0.604193 0.0997527 -0.769917 0.17952
+0.596375 0.139055 -0.78001 0.12878
+0.586004 0.177762 -0.786763 0.0774894
+0.573123 0.215708 -0.790146 0.0258667
+0.557788 0.25273 -0.790146 -0.0258667
+0.540064 0.28867 -0.786763 -0.0774894
+0.520028 0.323374 -0.78001 -0.12878
+0.497765 0.356693 -0.769917 -0.17952
+0.47337 0.388485 -0.756528 -0.22949
+0.446949 0.418613 -0.739899 -0.278478
+0.418613 0.446949 -0.720101 -0.326274
+0.388485 0.47337 -0.69722 -0.372672
+0.356693 0.497765 -0.671353 -0.417474
+0.323374 0.520028 -0.642612 -0.460489
+0.28867 0.540064 -0.611118 -0.501532
+0.25273 0.557788 -0.577008 -0.540427
+0.215708 0.573123 -0.540427 -0.577008
+0.177762 0.586004 -0.501532 -0.611118
+0.139055 0.596375 -0.460489 -0.642612
+0.0997526 0.604193 -0.417474 -0.671353
+0.0600229 0.609424 -0.372672 -0.69722
+0.0200362 0.612045 -0.326273 -0.720101
+-0.0200363 0.612045 -0.278478 -0.739899
+-0.060023 0.609424 -0.22949 -0.756528
+-0.0997527 0.604193 -0.179519 -0.769917
+-0.139055 0.596375 -0.12878 -0.78001
+-0.177762 0.586004 -0.0774892 -0.786763
+-0.215708 0.573123 -0.0258666 -0.790146
+-0.25273 0.557788 0.0258668 -0.790146
+-0.28867 0.540064 0.0774893 -0.786763
+-0.323374 0.520028 0.12878 -0.78001
+-0.356693 0.497765 0.17952 -0.769917
+-0.388485 0.47337 0.22949 -0.756528
+-0.418613 0.446949 0.278478 -0.739899
+-0.446949 0.418613 0.326274 -0.720101
+-0.47337 0.388485 0.372672 -0.69722
+-0.497765 0.356693 0.417474 -0.671353
+-0.520028 0.323374 0.460489 -0.642612
+-0.540064 0.28867 0.501532 -0.611118
+-0.557788 0.25273 0.540427 -0.577008
+-0.573123 0.215708 0.577008 -0.540427
+-0.586004 0.177762 0.611118 -0.501532
+-0.596375 0.139055 0.642612 -0.460489
+-0.604193 0.0997527 0.671353 -0.417474
+-0.609424 0.060023 0.69722 -0.372672
+-0.612045 0.0200363 0.720101 -0.326274
+0.645152 0.0211201 -0.737738 0.197676
+0.642389 0.0632698 -0.749087 0.149003
+0.636876 0.105149 -0.757229 0.099691
+0.628635 0.146577 -0.762127 0.0499525
+0.617702 0.187378 -0.763763 -5.69236e-10
+0.604125 0.227376 -0.762127 -0.0499525
+0.58796 0.266401 -0.757229 -0.099691
+0.569278 0.304285 -0.749087 -0.149003
+0.548158 0.340866 -0.737738 -0.197676
+0.52469 0.375988 -0.72323 -0.245503
+0.498976 0.409499 -0.705625 -0.292279
+0.471125 0.441257 -0.684998 -0.337804
+0.441257 0.471125 -0.661438 -0.381881
+0.409499 0.498976 -0.635045 -0.424324
+0.375988 0.52469 -0.605934 -0.464949
+0.340866 0.548158 -0.574227 -0.503584
+0.304285 0.569278 -0.540062 -0.540062
+0.266401 0.58796 -0.503584 -0.574227
+0.227376 0.604125 -0.464949 -0.605934
+0.187378 0.617702 -0.424324 -0.635045
+0.146577 0.628635 -0.381881 -0.661438
+0.105148 0.636876 -0.337803 -0.684998
+0.0632697 0.642389 -0.292279 -0.705625
+0.02112 0.645152 -0.245503 -0.72323
+-0.0211201 0.645152 -0.197676 -0.737738
+-0.0632698 0.642389 -0.149003 -0.749087
+-0.105149 0.636876 -0.099691 -0.757229
+-0.146577 0.628635 -0.0499524 -0.762127
+-0.187378 0.617702 1.25002e-07 -0.763763
+-0.227377 0.604125 0.0499526 -0.762127
+-0.266401 0.58796 0.0996911 -0.757229
+-0.304285 0.569278 0.149003 -0.749087
+-0.340866 0.548158 0.197676 -0.737738
+-0.375988 0.52469 0.245504 -0.72323
+-0.409499 0.498976 0.292279 -0.705625
+-0.441257 0.471125 0.337804 -0.684998
+-0.471125 0.441257 0.381881 -0.661438
+-0.498976 0.409499 0.424324 -0.635045
+-0.52469 0.375988 0.464949 -0.605934
+-0.548158 0.340866 0.503584 -0.574227
+-0.569278 0.304285 0.540062 -0.540062
+-0.58796 0.266401 0.574227 -0.503584
+-0.604125 0.227376 0.605934 -0.464949
+-0.617702 0.187378 0.635045 -0.424324
+-0.628635 0.146577 0.661438 -0.381881
+-0.636876 0.105149 0.684998 -0.337804
+-0.642389 0.0632698 0.705625 -0.292279
+-0.645152 0.0211201 0.72323 -0.245504
+0.645152 0.0211201 -0.684998 0.337804
+0.642389 0.0632698 -0.705625 0.292279
+0.636876 0.105149 -0.72323 0.245503
+0.628635 0.146577 -0.737738 0.197676
+0.617702 0.187378 -0.749087 0.149003
+0.604125 0.227376 -0.757229 0.099691
+0.58796 0.266401 -0.762127 0.0499525
+0.569278 0.304285 -0.763763 -1.61233e-08
+0.548158 0.340866 -0.762127 -0.0499525
+0.52469 0.375988 -0.757229 -0.099691
+0.498976 0.409499 -0.749087 -0.149003
+0.471125 0.441257 -0.737738 -0.197676
+0.441257 0.471125 -0.72323 -0.245503
+0.409499 0.498976 -0.705625 -0.292279
+0.375988 0.52469 -0.684998 -0.337804
+0.340866 0.548158 -0.661438 -0.381881
+0.304285 0.569278 -0.635045 -0.424324
+0.266401 0.58796 -0.605934 -0.464949
+0.227376 0.604125 -0.574227 -0.503584
+0.187378 0.617702 -0.540062 -0.540062
+0.146577 0.628635 -0.503584 -0.574227
+0.105148 0.636876 -0.464949 -0.605934
+0.0632697 0.642389 -0.424324 -0.635045
+0.02112 0.645152 -0.381881 -0.661438
+-0.0211201 0.645152 -0.337804 -0.684998
+-0.0632698 0.642389 -0.292279 -0.705625
+-0.105149 0.636876 -0.245503 -0.72323
+-0.146577 0.628635 -0.197676 -0.737738
+-0.187378 0.617702 -0.149003 -0.749087
+-0.227377 0.604125 -0.0996909 -0.757229
+-0.266401 0.58796 -0.0499524 -0.762127
+-0.304285 0.569278 -8.70629e-08 -0.763763
+-0.340866 0.548158 0.0499525 -0.762127
+-0.375988 0.52469 0.0996911 -0.757229
+-0.409499 0.498976 0.149003 -0.749087
+-0.441257 0.471125 0.197676 -0.737738
+-0.471125 0.441257 0.245503 -0.72323
+-0.498976 0.409499 0.292279 -0.705625
+-0.52469 0.375988 0.337804 -0.684998
+-0.548158 0.340866 0.381881 -0.661438
+-0.569278 0.304285 0.424324 -0.635045
+-0.58796 0.266401 0.464949 -0.605934
+-0.604125 0.227376 0.503584 -0.574227
+-0.617702 0.187378 0.540062 -0.540062
+-0.628635 0.146577 0.574227 -0.503584
+-0.636876 0.105149 0.605934 -0.464949
+-0.642389 0.0632698 0.635045 -0.424324
+-0.645152 0.0211201 0.661438 -0.381881
+0.676641 0.0221509 -0.688808 0.259249
+0.673743 0.0663579 -0.704289 0.213644
+0.667961 0.110281 -0.716754 0.167124
+0.659318 0.153731 -0.72615 0.119888
+0.647852 0.196524 -0.732436 0.0721387
+0.633611 0.238474 -0.735586 0.0240806
+0.616658 0.279404 -0.735586 -0.0240806
+0.597064 0.319137 -0.732436 -0.0721387
+0.574913 0.357504 -0.72615 -0.119888
+0.5503 0.394339 -0.716754 -0.167124
+0.523331 0.429486 -0.704289 -0.213644
+0.49412 0.462794 -0.688808 -0.259249
+0.462794 0.49412 -0.670378 -0.303744
+0.429486 0.523331 -0.649076 -0.346939
+0.394339 0.5503 -0.624996 -0.388647
+0.357504 0.574913 -0.598239 -0.428692
+0.319137 0.597064 -0.56892 -0.466901
+0.279404 0.616658 -0.537165 -0.50311
+0.238474 0.633611 -0.50311 -0.537165
+0.196524 0.647852 -0.466901 -0.56892
+0.153731 0.659318 -0.428692 -0.598239
+0.110281 0.667961 -0.388647 -0.624996
+0.0663578 0.673743 -0.346939 -0.649077
+0.0221508 0.676641 -0.303744 -0.670378
+-0.022151 0.676641 -0.259249 -0.688808
+-0.066358 0.673743 -0.213644 -0.704289
+-0.110281 0.667961 -0.167124 -0.716754
+-0.153731 0.659318 -0.119888 -0.72615
+-0.196524 0.647852 -0.0721385 -0.732436
+-0.238475 0.633611 -0.0240805 -0.735586
+-0.279404 0.616658 0.0240807 -0.735586
+-0.319137 0.597064 0.0721386 -0.732436
+-0.357504 0.574913 0.119888 -0.72615
+-0.394339 0.5503 0.167124 -0.716754
+-0.429486 0.523331 0.213644 -0.704289
+-0.462794 0.49412 0.259249 -0.688808
+-0.49412 0.462794 0.303744 -0.670378
+-0.523331 0.429486 0.346939 -0.649076
+-0.5503 0.394339 0.388647 -0.624996
+-0.574913 0.357504 0.428692 -0.598239
+-0.597063 0.319137 0.466901 -0.56892
+-0.616658 0.279404 0.50311 -0.537165
+-0.633611 0.238474 0.537165 -0.50311
+-0.647852 0.196524 0.56892 -0.466901
+-0.659318 0.153731 0.598239 -0.428692
+-0.667961 0.110281 0.624996 -0.388647
+-0.673743 0.0663579 0.649076 -0.346939
+-0.676641 0.022151 0.670378 -0.303744
+0.676641 0.0221509 -0.72615 0.119888
+0.673743 0.0663579 -0.732436 0.0721387
+0.667961 0.110281 -0.735586 0.0240806
+0.659318 0.153731 -0.735586 -0.0240806
+0.647852 0.196524 -0.732436 -0.0721387
+0.633611 0.238474 -0.72615 -0.119888
+0.616658 0.279404 -0.716754 -0.167124
+0.597064 0.319137 -0.704289 -0.213644
+0.574913 0.357504 -0.688808 -0.259249
+0.5503 0.394339 -0.670378 -0.303744
+0.523331 0.429486 -0.649076 -0.346939
+0.49412 0.462794 -0.624996 -0.388647
+0.462794 0.49412 -0.598239 -0.428692
+0.429486 0.523331 -0.56892 -0.466901
+0.394339 0.5503 -0.537165 -0.50311
+0.357504 0.574913 -0.50311 -0.537165
+0.319137 0.597064 -0.466901 -0.56892
+0.279404 0.616658 -0.428692 -0.598239
+0.238474 0.633611 -0.388647 -0.624996
+0.196524 0.647852 -0.346939 -0.649076
+0.153731 0.659318 -0.303744 -0.670378
+0.110281 0.667961 -0.259249 -0.688808
+0.0663578 0.673743 -0.213644 -0.704289
+0.0221508 0.676641 -0.167124 -0.716754
+-0.022151 0.676641 -0.119888 -0.72615
+-0.066358 0.673743 -0.0721386 -0.732436
+-0.110281 0.667961 -0.0240805 -0.735586
+-0.153731 0.659318 0.0240806 -0.735586
+-0.196524 0.647852 0.0721388 -0.732436
+-0.238475 0.633611 0.119888 -0.72615
+-0.279404 0.616658 0.167124 -0.716754
+-0.319137 0.597064 0.213644 -0.704289
+-0.357504 0.574913 0.259249 -0.688808
+-0.394339 0.5503 0.303744 -0.670378
+-0.429486 0.523331 0.346939 -0.649076
+-0.462794 0.49412 0.388647 -0.624996
+-0.49412 0.462794 0.428692 -0.598239
+-0.523331 0.429486 0.466901 -0.56892
+-0.5503 0.394339 0.50311 -0.537165
+-0.574913 0.357504 0.537165 -0.50311
+-0.597063 0.319137 0.56892 -0.466901
+-0.616658 0.279404 0.598239 -0.428692
+-0.633611 0.238474 0.624996 -0.388647
+-0.647852 0.196524 0.649076 -0.346939
+-0.659318 0.153731 0.670378 -0.303744
+-0.667961 0.110281 0.688808 -0.259249
+-0.673743 0.0663579 0.704289 -0.213644
+-0.676641 0.022151 0.716754 -0.167124
+0.706728 0.0231359 -0.705593 0.046247
+0.703702 0.0693086 -0.707107 -1.93179e-09
+0.697662 0.115184 -0.705593 -0.046247
+0.688635 0.160567 -0.701057 -0.092296
+0.676659 0.205262 -0.69352 -0.13795
+0.661785 0.249078 -0.683013 -0.183013
+0.644078 0.291828 -0.669581 -0.227292
+0.623612 0.333328 -0.653281 -0.270598
+0.600477 0.3734 -0.634185 -0.312745
+0.574769 0.411874 -0.612372 -0.353553
+0.546601 0.448584 -0.587938 -0.392847
+0.516092 0.483373 -0.560986 -0.430459
+0.483373 0.516092 -0.531631 -0.466228
+0.448584 0.546601 -0.5 -0.5
+0.411874 0.574769 -0.466228 -0.531631
+0.3734 0.600477 -0.430459 -0.560986
+0.333328 0.623613 -0.392847 -0.587938
+0.291828 0.644078 -0.353553 -0.612372
+0.249078 0.661785 -0.312745 -0.634185
+0.205262 0.676659 -0.270598 -0.653281
+0.160567 0.688635 -0.227292 -0.669581
+0.115184 0.697662 -0.183013 -0.683013
+0.0693085 0.703702 -0.13795 -0.69352
+0.0231358 0.706728 -0.0922959 -0.701057
+-0.023136 0.706728 -0.0462469 -0.705593
+-0.0693086 0.703702 5.91822e-08 -0.707107
+-0.115185 0.697662 0.046247 -0.705593
+-0.160567 0.688635 0.092296 -0.701057
+-0.205262 0.676659 0.13795 -0.69352
+-0.249078 0.661785 0.183013 -0.683013
+-0.291828 0.644078 0.227292 -0.669581
+-0.333328 0.623613 0.270598 -0.653282
+-0.3734 0.600477 0.312745 -0.634185
+-0.411874 0.574769 0.353553 -0.612372
+-0.448584 0.546601 0.392847 -0.587938
+-0.483373 0.516092 0.430459 -0.560985
+-0.516092 0.483373 0.466228 -0.531631
+-0.546601 0.448584 0.5 -0.5
+-0.574769 0.411874 0.531631 -0.466228
+-0.600477 0.3734 0.560986 -0.430459
+-0.623612 0.333328 0.587938 -0.392848
+-0.644078 0.291828 0.612372 -0.353553
+-0.661785 0.249078 0.634185 -0.312745
+-0.676659 0.205262 0.653281 -0.270598
+-0.688635 0.160567 0.669581 -0.227292
+-0.697662 0.115185 0.683013 -0.183013
+-0.703702 0.0693086 0.69352 -0.13795
+-0.706728 0.0231359 0.701057 -0.092296
+0.706728 0.0231359 -0.683013 0.183013
+0.703702 0.0693086 -0.69352 0.13795
+0.697662 0.115184 -0.701057 0.092296
+0.688635 0.160567 -0.705593 0.046247
+0.676659 0.205262 -0.707107 -5.2701e-10
+0.661785 0.249078 -0.705593 -0.046247
+0.644078 0.291828 -0.701057 -0.092296
+0.623612 0.333328 -0.69352 -0.13795
+0.600477 0.3734 -0.683013 -0.183013
+0.574769 0.411874 -0.669581 -0.227292
+0.546601 0.448584 -0.653281 -0.270598
+0.516092 0.483373 -0.634185 -0.312745
+0.483373 0.516092 -0.612372 -0.353553
+0.448584 0.546601 -0.587938 -0.392847
+0.411874 0.574769 -0.560986 -0.430459
+0.3734 0.600477 -0.531631 -0.466228
+0.333328 0.623613 -0.5 -0.5
+0.291828 0.644078 -0.466228 -0.531631
+0.249078 0.661785 -0.430459 -0.560986
+0.205262 0.676659 -0.392847 -0.587938
+0.160567 0.688635 -0.353553 -0.612372
+0.115184 0.697662 -0.312745 -0.634185
+0.0693085 0.703702 -0.270598 -0.653282
+0.0231358 0.706728 -0.227292 -0.669581
+-0.023136 0.706728 -0.183013 -0.683013
+-0.0693086 0.703702 -0.13795 -0.69352
+-0.115185 0.697662 -0.0922959 -0.701057
+-0.160567 0.688635 -0.046247 -0.705593
+-0.205262 0.676659 1.15729e-07 -0.707107
+-0.249078 0.661785 0.0462471 -0.705593
+-0.291828 0.644078 0.0922961 -0.701057
+-0.333328 0.623613 0.13795 -0.69352
+-0.3734 0.600477 0.183013 -0.683013
+-0.411874 0.574769 0.227292 -0.669581
+-0.448584 0.546601 0.270598 -0.653281
+-0.483373 0.516092 0.312745 -0.634185
+-0.516092 0.483373 0.353553 -0.612372
+-0.546601 0.448584 0.392848 -0.587938
+-0.574769 0.411874 0.430459 -0.560986
+-0.600477 0.3734 0.466228 -0.531631
+-0.623612 0.333328 0.5 -0.5
+-0.644078 0.291828 0.531631 -0.466228
+-0.661785 0.249078 0.560986 -0.430459
+-0.676659 0.205262 0.587938 -0.392847
+-0.688635 0.160567 0.612372 -0.353553
+-0.697662 0.115185 0.634185 -0.312745
+-0.703702 0.0693086 0.653281 -0.270598
+-0.706728 0.0231359 0.669581 -0.227292
+0.735586 0.0240806 -0.667961 0.110281
+0.732436 0.0721387 -0.673743 0.0663579
+0.72615 0.119888 -0.676641 0.0221509
+0.716754 0.167124 -0.676641 -0.0221509
+0.704289 0.213644 -0.673743 -0.0663579
+0.688808 0.259249 -0.667961 -0.110281
+0.670378 0.303744 -0.659318 -0.153731
+0.649076 0.346939 -0.647852 -0.196524
+0.624996 0.388647 -0.633611 -0.238474
+0.598239 0.428692 -0.616658 -0.279404
+0.56892 0.466901 -0.597064 -0.319137
+0.537165 0.50311 -0.574913 -0.357504
+0.50311 0.537165 -0.5503 -0.394339
+0.466901 0.56892 -0.523331 -0.429486
+0.428692 0.598239 -0.49412 -0.462794
+0.388647 0.624996 -0.462794 -0.49412
+0.346939 0.649077 -0.429486 -0.523331
+0.303744 0.670378 -0.394339 -0.5503
+0.259249 0.688808 -0.357504 -0.574913
+0.213644 0.704289 -0.319137 -0.597064
+0.167124 0.716754 -0.279404 -0.616658
+0.119888 0.72615 -0.238474 -0.633611
+0.0721386 0.732436 -0.196524 -0.647852
+0.0240805 0.735586 -0.153731 -0.659318
+-0.0240807 0.735586 -0.110281 -0.667961
+-0.0721387 0.732436 -0.0663579 -0.673743
+-0.119888 0.72615 -0.0221509 -0.676641
+-0.167124 0.716754 0.022151 -0.676641
+-0.213644 0.704289 0.066358 -0.673743
+-0.259249 0.688808 0.110281 -0.667961
+-0.303744 0.670378 0.153731 -0.659318
+-0.346939 0.649077 0.196524 -0.647852
+-0.388647 0.624996 0.238474 -0.633611
+-0.428692 0.598239 0.279404 -0.616658
+-0.466901 0.56892 0.319137 -0.597064
+-0.50311 0.537165 0.357504 -0.574913
+-0.537165 0.50311 0.394339 -0.5503
+-0.56892 0.466901 0.429486 -0.523331
+-0.598239 0.428692 0.462794 -0.49412
+-0.624996 0.388647 0.49412 -0.462794
+-0.649076 0.346939 0.523331 -0.429486
+-0.670378 0.303744 0.5503 -0.394339
+-0.688808 0.259249 0.574913 -0.357504
+-0.704289 0.213644 0.597064 -0.319137
+-0.716754 0.167124 0.616658 -0.279404
+-0.72615 0.119888 0.633611 -0.238474
+-0.732436 0.0721386 0.647852 -0.196524
+-0.735586 0.0240807 0.659318 -0.153731
+0.676641 0.0221509 -0.624996 0.388647
+0.673743 0.0663579 -0.649076 0.346939
+0.667961 0.110281 -0.670378 0.303744
+0.659318 0.153731 -0.688808 0.259249
+0.647852 0.196524 -0.704289 0.213644
+0.633611 0.238474 -0.716754 0.167124
+0.616658 0.279404 -0.72615 0.119888
+0.597064 0.319137 -0.732436 0.0721386
+0.574913 0.357504 -0.735586 0.0240806
+0.5503 0.394339 -0.735586 -0.0240806
+0.523331 0.429486 -0.732436 -0.0721386
+0.49412 0.462794 -0.72615 -0.119888
+0.462794 0.49412 -0.716754 -0.167124
+0.429486 0.523331 -0.704289 -0.213644
+0.394339 0.5503 -0.688808 -0.259249
+0.357504 0.574913 -0.670378 -0.303744
+0.319137 0.597064 -0.649076 -0.346939
+0.279404 0.616658 -0.624996 -0.388647
+0.238474 0.633611 -0.598239 -0.428692
+0.196524 0.647852 -0.56892 -0.466901
+0.153731 0.659318 -0.537165 -0.50311
+0.110281 0.667961 -0.50311 -0.537165
+0.0663578 0.673743 -0.466901 -0.56892
+0.0221508 0.676641 -0.428692 -0.598239
+-0.022151 0.676641 -0.388647 -0.624996
+-0.066358 0.673743 -0.346939 -0.649077
+-0.110281 0.667961 -0.303744 -0.670378
+-0.153731 0.659318 -0.259249 -0.688808
+-0.196524 0.647852 -0.213644 -0.704289
+-0.238475 0.633611 -0.167123 -0.716754
+-0.279404 0.616658 -0.119888 -0.72615
+-0.319137 0.597064 -0.0721387 -0.732436
+-0.357504 0.574913 -0.0240806 -0.735586
+-0.394339 0.5503 0.0240807 -0.735586
+-0.429486 0.523331 0.0721386 -0.732436
+-0.462794 0.49412 0.119888 -0.72615
+-0.49412 0.462794 0.167124 -0.716754
+-0.523331 0.429486 0.213644 -0.704289
+-0.5503 0.394339 0.259249 -0.688808
+-0.574913 0.357504 0.303744 -0.670378
+-0.597063 0.319137 0.346939 -0.649077
+-0.616658 0.279404 0.388647 -0.624996
+-0.633611 0.238474 0.428692 -0.598239
+-0.647852 0.196524 0.466901 -0.56892
+-0.659318 0.153731 0.50311 -0.537165
+-0.667961 0.110281 0.537165 -0.50311
+-0.673743 0.0663579 0.56892 -0.466901
+-0.676641 0.022151 0.598239 -0.428692
+0.706728 0.0231359 -0.634185 0.312745
+0.703702 0.0693086 -0.653281 0.270598
+0.697662 0.115184 -0.669581 0.227292
+0.688635 0.160567 -0.683013 0.183013
+0.676659 0.205262 -0.69352 0.13795
+0.661785 0.249078 -0.701057 0.092296
+0.644078 0.291828 -0.705593 0.046247
+0.623612 0.333328 -0.707107 -1.49273e-08
+0.600477 0.3734 -0.705593 -0.046247
+0.574769 0.411874 -0.701057 -0.092296
+0.546601 0.448584 -0.69352 -0.13795
+0.516092 0.483373 -0.683013 -0.183013
+0.483373 0.516092 -0.669581 -0.227292
+0.448584 0.546601 -0.653281 -0.270598
+0.411874 0.574769 -0.634185 -0.312745
+0.3734 0.600477 -0.612372 -0.353553
+0.333328 0.623613 -0.587938 -0.392848
+0.291828 0.644078 -0.560985 -0.430459
+0.249078 0.661785 -0.531631 -0.466228
+0.205262 0.676659 -0.5 -0.5
+0.160567 0.688635 -0.466228 -0.531631
+0.115184 0.697662 -0.430459 -0.560986
+0.0693085 0.703702 -0.392847 -0.587938
+0.0231358 0.706728 -0.353553 -0.612372
+-0.023136 0.706728 -0.312745 -0.634185
+-0.0693086 0.703702 -0.270598 -0.653282
+-0.115185 0.697662 -0.227292 -0.669581
+-0.160567 0.688635 -0.183013 -0.683013
+-0.205262 0.676659 -0.13795 -0.69352
+-0.249078 0.661785 -0.0922959 -0.701057
+-0.291828 0.644078 -0.0462469 -0.705593
+-0.333328 0.623613 -8.06046e-08 -0.707107
+-0.3734 0.600477 0.046247 -0.705593
+-0.411874 0.574769 0.092296 -0.701057
+-0.448584 0.546601 0.13795 -0.69352
+-0.483373 0.516092 0.183013 -0.683013
+-0.516092 0.483373 0.227292 -0.669581
+-0.546601 0.448584 0.270598 -0.653281
+-0.574769 0.411874 0.312745 -0.634185
+-0.600477 0.3734 0.353553 -0.612372
+-0.623612 0.333328 0.392847 -0.587938
+-0.644078 0.291828 0.430459 -0.560986
+-0.661785 0.249078 0.466228 -0.531631
+-0.676659 0.205262 0.5 -0.5
+-0.688635 0.160567 0.531631 -0.466228
+-0.697662 0.115185 0.560986 -0.430459
+-0.703702 0.0693086 0.587938 -0.392847
+-0.706728 0.0231359 0.612372 -0.353553
+0.706728 0.0231359 -0.560986 0.430459
+0.703702 0.0693086 -0.587938 0.392847
+0.697662 0.115184 -0.612372 0.353553
+0.688635 0.160567 -0.634185 0.312745
+0.676659 0.205262 -0.653281 0.270598
+0.661785 0.249078 -0.669581 0.227292
+0.644078 0.291828 -0.683013 0.183013
+0.623612 0.333328 -0.69352 0.13795
+0.600477 0.3734 -0.701057 0.092296
+0.574769 0.411874 -0.705593 0.046247
+0.546601 0.448584 -0.707107 1.28193e-08
+0.516092 0.483373 -0.705593 -0.046247
+0.483373 0.516092 -0.701057 -0.092296
+0.448584 0.546601 -0.69352 -0.13795
+0.411874 0.574769 -0.683013 -0.183013
+0.3734 0.600477 -0.669581 -0.227292
+0.333328 0.623613 -0.653281 -0.270598
+0.291828 0.644078 -0.634185 -0.312745
+0.249078 0.661785 -0.612372 -0.353553
+0.205262 0.676659 -0.587938 -0.392848
+0.160567 0.688635 -0.560986 -0.430459
+0.115184 0.697662 -0.531631 -0.466228
+0.0693085 0.703702 -0.5 -0.5
+0.0231358 0.706728 -0.466228 -0.531631
+-0.023136 0.706728 -0.430459 -0.560986
+-0.0693086 0.703702 -0.392847 -0.587938
+-0.115185 0.697662 -0.353553 -0.612372
+-0.160567 0.688635 -0.312745 -0.634185
+-0.205262 0.676659 -0.270598 -0.653282
+-0.249078 0.661785 -0.227292 -0.669581
+-0.291828 0.644078 -0.183013 -0.683013
+-0.333328 0.623613 -0.13795 -0.69352
+-0.3734 0.600477 -0.092296 -0.701057
+-0.411874 0.574769 -0.0462469 -0.705593
+-0.448584 0.546601 -2.40575e-08 -0.707107
+-0.483373 0.516092 0.046247 -0.705593
+-0.516092 0.483373 0.0922959 -0.701057
+-0.546601 0.448584 0.13795 -0.69352
+-0.574769 0.411874 0.183013 -0.683013
+-0.600477 0.3734 0.227292 -0.669581
+-0.623612 0.333328 0.270598 -0.653282
+-0.644078 0.291828 0.312745 -0.634185
+-0.661785 0.249078 0.353553 -0.612372
+-0.676659 0.205262 0.392847 -0.587938
+-0.688635 0.160567 0.430459 -0.560985
+-0.697662 0.115185 0.466228 -0.531631
+-0.703702 0.0693086 0.5 -0.5
+-0.706728 0.0231359 0.531631 -0.466228
+0.735586 0.0240806 -0.574913 0.357504
+0.732436 0.0721387 -0.597064 0.319137
+0.72615 0.119888 -0.616658 0.279404
+0.716754 0.167124 -0.633611 0.238474
+0.704289 0.213644 -0.647852 0.196524
+0.688808 0.259249 -0.659318 0.153731
+0.670378 0.303744 -0.667961 0.110281
+0.649076 0.346939 -0.673743 0.0663579
+0.624996 0.388647 -0.676641 0.0221509
+0.598239 0.428692 -0.676641 -0.0221509
+0.56892 0.466901 -0.673743 -0.0663579
+0.537165 0.50311 -0.667961 -0.110281
+0.50311 0.537165 -0.659318 -0.153731
+0.466901 0.56892 -0.647852 -0.196524
+0.428692 0.598239 -0.633611 -0.238474
+0.388647 0.624996 -0.616658 -0.279404
+0.346939 0.649077 -0.597063 -0.319137
+0.303744 0.670378 -0.574913 -0.357504
+0.259249 0.688808 -0.5503 -0.394339
+0.213644 0.704289 -0.523331 -0.429486
+0.167124 0.716754 -0.49412 -0.462794
+0.119888 0.72615 -0.462794 -0.49412
+0.0721386 0.732436 -0.429486 -0.523331
+0.0240805 0.735586 -0.394339 -0.5503
+-0.0240807 0.735586 -0.357504 -0.574913
+-0.0721387 0.732436 -0.319137 -0.597064
+-0.119888 0.72615 -0.279404 -0.616658
+-0.167124 0.716754 -0.238474 -0.633611
+-0.213644 0.704289 -0.196524 -0.647852
+-0.259249 0.688808 -0.153731 -0.659318
+-0.303744 0.670378 -0.110281 -0.667961
+-0.346939 0.649077 -0.066358 -0.673743
+-0.388647 0.624996 -0.0221509 -0.676641
+-0.428692 0.598239 0.022151 -0.676641
+-0.466901 0.56892 0.0663579 -0.673743
+-0.50311 0.537165 0.110281 -0.667961
+-0.537165 0.50311 0.153731 -0.659318
+-0.56892 0.466901 0.196524 -0.647852
+-0.598239 0.428692 0.238474 -0.633611
+-0.624996 0.388647 0.279404 -0.616658
+-0.649076 0.346939 0.319137 -0.597064
+-0.670378 0.303744 0.357504 -0.574913
+-0.688808 0.259249 0.394339 -0.5503
+-0.704289 0.213644 0.429486 -0.523331
+-0.716754 0.167124 0.462794 -0.49412
+-0.72615 0.119888 0.49412 -0.462794
+-0.732436 0.0721386 0.523331 -0.429486
+-0.735586 0.0240807 0.5503 -0.394339
+0.735586 0.0240806 -0.633611 0.238474
+0.732436 0.0721387 -0.647852 0.196524
+0.72615 0.119888 -0.659318 0.153731
+0.716754 0.167124 -0.667961 0.110281
+0.704289 0.213644 -0.673743 0.0663579
+0.688808 0.259249 -0.676641 0.0221509
+0.670378 0.303744 -0.676641 -0.0221509
+0.649076 0.346939 -0.673743 -0.0663579
+0.624996 0.388647 -0.667961 -0.110281
+0.598239 0.428692 -0.659318 -0.153731
+0.56892 0.466901 -0.647852 -0.196524
+0.537165 0.50311 -0.633611 -0.238474
+0.50311 0.537165 -0.616658 -0.279404
+0.466901 0.56892 -0.597064 -0.319137
+0.428692 0.598239 -0.574913 -0.357504
+0.388647 0.624996 -0.5503 -0.394339
+0.346939 0.649077 -0.523331 -0.429486
+0.303744 0.670378 -0.49412 -0.462794
+0.259249 0.688808 -0.462794 -0.49412
+0.213644 0.704289 -0.429486 -0.523331
+0.167124 0.716754 -0.394339 -0.5503
+0.119888 0.72615 -0.357504 -0.574913
+0.0721386 0.732436 -0.319137 -0.597064
+0.0240805 0.735586 -0.279404 -0.616658
+-0.0240807 0.735586 -0.238474 -0.633611
+-0.0721387 0.732436 -0.196524 -0.647852
+-0.119888 0.72615 -0.153731 -0.659318
+-0.167124 0.716754 -0.110281 -0.667961
+-0.213644 0.704289 -0.0663578 -0.673743
+-0.259249 0.688808 -0.0221508 -0.676641
+-0.303744 0.670378 0.022151 -0.676641
+-0.346939 0.649077 0.0663578 -0.673743
+-0.388647 0.624996 0.110281 -0.667961
+-0.428692 0.598239 0.153731 -0.659318
+-0.466901 0.56892 0.196524 -0.647852
+-0.50311 0.537165 0.238474 -0.633611
+-0.537165 0.50311 0.279404 -0.616658
+-0.56892 0.466901 0.319137 -0.597064
+-0.598239 0.428692 0.357504 -0.574913
+-0.624996 0.388647 0.394339 -0.5503
+-0.649076 0.346939 0.429486 -0.523331
+-0.670378 0.303744 0.462794 -0.49412
+-0.688808 0.259249 0.49412 -0.462794
+-0.704289 0.213644 0.523331 -0.429486
+-0.716754 0.167124 0.5503 -0.394339
+-0.72615 0.119888 0.574913 -0.357504
+-0.732436 0.0721386 0.597064 -0.319137
+-0.735586 0.0240807 0.616658 -0.279404
+0.763354 0.0249896 -0.623502 0.167067
+0.760085 0.0748618 -0.633094 0.12593
+0.753561 0.124413 -0.639975 0.0842543
+0.743811 0.173432 -0.644115 0.0422175
+0.730875 0.221709 -0.645497 -4.81092e-10
+0.71481 0.269035 -0.644115 -0.0422175
+0.695684 0.31521 -0.639975 -0.0842543
+0.673578 0.360035 -0.633094 -0.12593
+0.648589 0.403318 -0.623502 -0.167067
+0.620822 0.444875 -0.611241 -0.207488
+0.590396 0.484526 -0.596362 -0.247021
+0.557443 0.522102 -0.578929 -0.285496
+0.522102 0.557443 -0.559017 -0.322749
+0.484526 0.590396 -0.536711 -0.358619
+0.444875 0.620822 -0.512107 -0.392954
+0.403318 0.648589 -0.48531 -0.425606
+0.360035 0.673579 -0.456435 -0.456436
+0.31521 0.695684 -0.425606 -0.485311
+0.269035 0.71481 -0.392954 -0.512107
+0.221709 0.730875 -0.358619 -0.536711
+0.173432 0.743811 -0.322749 -0.559017
+0.124413 0.753561 -0.285496 -0.578929
+0.0748617 0.760085 -0.247021 -0.596362
+0.0249895 0.763354 -0.207488 -0.611241
+-0.0249897 0.763354 -0.167067 -0.623502
+-0.0748619 0.760085 -0.12593 -0.633094
+-0.124414 0.753561 -0.0842542 -0.639975
+-0.173432 0.743811 -0.0422175 -0.644115
+-0.221709 0.730875 1.05646e-07 -0.645497
+-0.269036 0.71481 0.0422176 -0.644115
+-0.31521 0.695684 0.0842544 -0.639975
+-0.360035 0.673579 0.12593 -0.633094
+-0.403318 0.648589 0.167067 -0.623502
+-0.444875 0.620822 0.207488 -0.611241
+-0.484526 0.590397 0.247021 -0.596362
+-0.522102 0.557443 0.285496 -0.578929
+-0.557443 0.522102 0.322749 -0.559017
+-0.590397 0.484526 0.358619 -0.536711
+-0.620822 0.444875 0.392954 -0.512107
+-0.648589 0.403318 0.425606 -0.485311
+-0.673578 0.360035 0.456435 -0.456436
+-0.695684 0.31521 0.485311 -0.425606
+-0.71481 0.269035 0.512107 -0.392954
+-0.730875 0.221709 0.536711 -0.358619
+-0.743811 0.173432 0.559017 -0.322749
+-0.753561 0.124414 0.578929 -0.285496
+-0.760085 0.0748618 0.596362 -0.247021
+-0.763354 0.0249897 0.611241 -0.207488
+0.763354 0.0249896 -0.578929 0.285496
+0.760085 0.0748618 -0.596362 0.247021
+0.753561 0.124413 -0.611241 0.207488
+0.743811 0.173432 -0.623502 0.167067
+0.730875 0.221709 -0.633094 0.12593
+0.71481 0.269035 -0.639975 0.0842543
+0.695684 0.31521 -0.644115 0.0422175
+0.673578 0.360035 -0.645497 -1.36267e-08
+0.648589 0.403318 -0.644115 -0.0422175
+0.620822 0.444875 -0.639975 -0.0842543
+0.590396 0.484526 -0.633094 -0.12593
+0.557443 0.522102 -0.623502 -0.167067
+0.522102 0.557443 -0.611241 -0.207488
+0.484526 0.590396 -0.596362 -0.247021
+0.444875 0.620822 -0.578929 -0.285496
+0.403318 0.648589 -0.559017 -0.322749
+0.360035 0.673579 -0.536711 -0.358619
+0.31521 0.695684 -0.512107 -0.392954
+0.269035 0.71481 -0.48531 -0.425606
+0.221709 0.730875 -0.456435 -0.456435
+0.173432 0.743811 -0.425606 -0.485311
+0.124413 0.753561 -0.392954 -0.512107
+0.0748617 0.760085 -0.358619 -0.536711
+0.0249895 0.763354 -0.322749 -0.559017
+-0.0249897 0.763354 -0.285496 -0.578929
+-0.0748619 0.760085 -0.247021 -0.596362
+-0.124414 0.753561 -0.207488 -0.611241
+-0.173432 0.743811 -0.167067 -0.623502
+-0.221709 0.730875 -0.12593 -0.633094
+-0.269036 0.71481 -0.0842542 -0.639975
+-0.31521 0.695684 -0.0422174 -0.644115
+-0.360035 0.673579 -7.35816e-08 -0.645497
+-0.403318 0.648589 0.0422175 -0.644115
+-0.444875 0.620822 0.0842544 -0.639975
+-0.484526 0.590397 0.12593 -0.633094
+-0.522102 0.557443 0.167067 -0.623502
+-0.557443 0.522102 0.207488 -0.611241
+-0.590397 0.484526 0.247021 -0.596362
+-0.620822 0.444875 0.285496 -0.578929
+-0.648589 0.403318 0.322749 -0.559017
+-0.673578 0.360035 0.358619 -0.536711
+-0.695684 0.31521 0.392954 -0.512107
+-0.71481 0.269035 0.425606 -0.48531
+-0.730875 0.221709 0.456435 -0.456435
+-0.743811 0.173432 0.485311 -0.425606
+-0.753561 0.124414 0.512107 -0.392954
+-0.760085 0.0748618 0.536711 -0.358619
+-0.763354 0.0249897 0.559017 -0.322749
+0.790146 0.0258667 -0.573123 0.215708
+0.786763 0.0774894 -0.586004 0.177762
+0.78001 0.12878 -0.596375 0.139055
+0.769917 0.17952 -0.604193 0.0997527
+0.756528 0.22949 -0.609424 0.060023
+0.739899 0.278478 -0.612045 0.0200363
+0.720101 0.326274 -0.612045 -0.0200363
+0.69722 0.372672 -0.609424 -0.060023
+0.671353 0.417474 -0.604193 -0.0997527
+0.642612 0.460489 -0.596375 -0.139055
+0.611118 0.501532 -0.586004 -0.177762
+0.577008 0.540427 -0.573123 -0.215708
+0.540427 0.577008 -0.557788 -0.25273
+0.501532 0.611118 -0.540064 -0.28867
+0.460489 0.642612 -0.520028 -0.323374
+0.417474 0.671353 -0.497765 -0.356693
+0.372672 0.69722 -0.47337 -0.388485
+0.326274 0.720101 -0.446949 -0.418613
+0.278478 0.739899 -0.418613 -0.446949
+0.22949 0.756528 -0.388485 -0.47337
+0.17952 0.769917 -0.356693 -0.497765
+0.12878 0.78001 -0.323374 -0.520028
+0.0774893 0.786763 -0.28867 -0.540064
+0.0258666 0.790146 -0.25273 -0.557788
+-0.0258668 0.790146 -0.215708 -0.573123
+-0.0774894 0.786763 -0.177762 -0.586004
+-0.12878 0.78001 -0.139055 -0.596375
+-0.17952 0.769917 -0.0997527 -0.604193
+-0.22949 0.756528 -0.0600229 -0.609424
+-0.278478 0.739899 -0.0200362 -0.612045
+-0.326274 0.720101 0.0200363 -0.612045
+-0.372672 0.69722 0.0600229 -0.609424
+-0.417474 0.671353 0.0997527 -0.604193
+-0.460489 0.642612 0.139055 -0.596375
+-0.501532 0.611118 0.177762 -0.586004
+-0.540427 0.577008 0.215708 -0.573123
+-0.577008 0.540427 0.25273 -0.557788
+-0.611118 0.501532 0.28867 -0.540064
+-0.642612 0.460489 0.323374 -0.520028
+-0.671353 0.417474 0.356693 -0.497765
+-0.69722 0.372672 0.388485 -0.47337
+-0.720101 0.326274 0.418613 -0.446949
+-0.739899 0.278478 0.446949 -0.418613
+-0.756528 0.22949 0.47337 -0.388485
+-0.769917 0.179519 0.497765 -0.356693
+-0.78001 0.12878 0.520028 -0.323374
+-0.786763 0.0774893 0.540064 -0.28867
+-0.790146 0.0258668 0.557788 -0.25273
+0.735586 0.0240806 -0.676641 -0.0221509
+0.732436 0.0721387 -0.673743 -0.0663579
+0.72615 0.119888 -0.667961 -0.110281
+0.716754 0.167124 -0.659318 -0.153731
+0.704289 0.213644 -0.647852 -0.196524
+0.688808 0.259249 -0.633611 -0.238474
+0.670378 0.303744 -0.616658 -0.279404
+0.649076 0.346939 -0.597064 -0.319137
+0.624996 0.388647 -0.574913 -0.357504
+0.598239 0.428692 -0.5503 -0.394339
+0.56892 0.466901 -0.523331 -0.429486
+0.537165 0.50311 -0.49412 -0.462794
+0.50311 0.537165 -0.462794 -0.49412
+0.466901 0.56892 -0.429486 -0.523331
+0.428692 0.598239 -0.394339 -0.5503
+0.388647 0.624996 -0.357504 -0.574913
+0.346939 0.649077 -0.319137 -0.597064
+0.303744 0.670378 -0.279404 -0.616658
+0.259249 0.688808 -0.238474 -0.633611
+0.213644 0.704289 -0.196524 -0.647852
+0.167124 0.716754 -0.153731 -0.659318
+0.119888 0.72615 -0.110281 -0.667961
+0.0721386 0.732436 -0.0663578 -0.673743
+0.0240805 0.735586 -0.0221508 -0.676641
+-0.0240807 0.735586 0.022151 -0.676641
+-0.0721387 0.732436 0.066358 -0.673743
+-0.119888 0.72615 0.110281 -0.667961
+-0.167124 0.716754 0.153731 -0.659318
+-0.213644 0.704289 0.196524 -0.647852
+-0.259249 0.688808 0.238475 -0.633611
+-0.303744 0.670378 0.279404 -0.616658
+-0.346939 0.649077 0.319137 -0.597064
+-0.388647 0.624996 0.357504 -0.574913
+-0.428692 0.598239 0.394339 -0.5503
+-0.466901 0.56892 0.429486 -0.523331
+-0.50311 0.537165 0.462794 -0.49412
+-0.537165 0.50311 0.49412 -0.462794
+-0.56892 0.466901 0.523331 -0.429486
+-0.598239 0.428692 0.5503 -0.394339
+-0.624996 0.388647 0.574913 -0.357504
+-0.649076 0.346939 0.597063 -0.319137
+-0.670378 0.303744 0.616658 -0.279404
+-0.688808 0.259249 0.633611 -0.238474
+-0.704289 0.213644 0.647852 -0.196524
+-0.716754 0.167124 0.659318 -0.153731
+-0.72615 0.119888 0.667961 -0.110281
+-0.732436 0.0721386 0.673743 -0.0663579
+-0.735586 0.0240807 0.676641 -0.022151
+0.763354 0.0249896 -0.639975 -0.0842543
+0.760085 0.0748618 -0.633094 -0.12593
+0.753561 0.124413 -0.623502 -0.167067
+0.743811 0.173432 -0.611241 -0.207488
+0.730875 0.221709 -0.596362 -0.247021
+0.71481 0.269035 -0.578929 -0.285496
+0.695684 0.31521 -0.559017 -0.322749
+0.673578 0.360035 -0.536711 -0.358619
+0.648589 0.403318 -0.512107 -0.392954
+0.620822 0.444875 -0.485311 -0.425606
+0.590396 0.484526 -0.456435 -0.456435
+0.557443 0.522102 -0.425606 -0.485311
+0.522102 0.557443 -0.392954 -0.512107
+0.484526 0.590396 -0.358619 -0.536711
+0.444875 0.620822 -0.322749 -0.559017
+0.403318 0.648589 -0.285496 -0.578929
+0.360035 0.673579 -0.247021 -0.596362
+0.31521 0.695684 -0.207488 -0.611241
+0.269035 0.71481 -0.167067 -0.623502
+0.221709 0.730875 -0.12593 -0.633094
+0.173432 0.743811 -0.0842543 -0.639975
+0.124413 0.753561 -0.0422175 -0.644115
+0.0748617 0.760085 7.93547e-08 -0.645497
+0.0249895 0.763354 0.0422176 -0.644115
+-0.0249897 0.763354 0.0842544 -0.639975
+-0.0748619 0.760085 0.12593 -0.633094
+-0.124414 0.753561 0.167067 -0.623502
+-0.173432 0.743811 0.207488 -0.611241
+-0.221709 0.730875 0.247021 -0.596362
+-0.269036 0.71481 0.285496 -0.578929
+-0.31521 0.695684 0.322749 -0.559017
+-0.360035 0.673579 0.358619 -0.536711
+-0.403318 0.648589 0.392954 -0.512107
+-0.444875 0.620822 0.425606 -0.48531
+-0.484526 0.590397 0.456435 -0.456435
+-0.522102 0.557443 0.485311 -0.425606
+-0.557443 0.522102 0.512107 -0.392954
+-0.590397 0.484526 0.536711 -0.358619
+-0.620822 0.444875 0.559017 -0.322749
+-0.648589 0.403318 0.578929 -0.285496
+-0.673578 0.360035 0.596362 -0.247021
+-0.695684 0.31521 0.611241 -0.207488
+-0.71481 0.269035 0.623502 -0.167067
+-0.730875 0.221709 0.633094 -0.12593
+-0.743811 0.173432 0.639975 -0.0842542
+-0.753561 0.124414 0.644115 -0.0422176
+-0.760085 0.0748618 0.645497 3.0621e-08
+-0.763354 0.0249897 0.644115 0.0422175
+0.763354 0.0249896 -0.644115 0.0422175
+0.760085 0.0748618 -0.645497 -1.76347e-09
+0.753561 0.124413 -0.644115 -0.0422175
+0.743811 0.173432 -0.639975 -0.0842543
+0.730875 0.221709 -0.633094 -0.12593
+0.71481 0.269035 -0.623502 -0.167067
+0.695684 0.31521 -0.611241 -0.207488
+0.673578 0.360035 -0.596362 -0.247021
+0.648589 0.403318 -0.578929 -0.285496
+0.620822 0.444875 -0.559017 -0.322749
+0.590396 0.484526 -0.536711 -0.358619
+0.557443 0.522102 -0.512107 -0.392954
+0.522102 0.557443 -0.485311 -0.425606
+0.484526 0.590396 -0.456435 -0.456435
+0.444875 0.620822 -0.425606 -0.485311
+0.403318 0.648589 -0.392954 -0.512107
+0.360035 0.673579 -0.358619 -0.536711
+0.31521 0.695684 -0.322749 -0.559017
+0.269035 0.71481 -0.285496 -0.578929
+0.221709 0.730875 -0.247021 -0.596362
+0.173432 0.743811 -0.207488 -0.611241
+0.124413 0.753561 -0.167067 -0.623502
+0.0748617 0.760085 -0.12593 -0.633094
+0.0249895 0.763354 -0.0842542 -0.639975
+-0.0249897 0.763354 -0.0422175 -0.644115
+-0.0748619 0.760085 5.40257e-08 -0.645497
+-0.124414 0.753561 0.0422176 -0.644115
+-0.173432 0.743811 0.0842543 -0.639975
+-0.221709 0.730875 0.12593 -0.633094
+-0.269036 0.71481 0.167067 -0.623502
+-0.31521 0.695684 0.207488 -0.611241
+-0.360035 0.673579 0.247021 -0.596362
+-0.403318 0.648589 0.285496 -0.578929
+-0.444875 0.620822 0.322749 -0.559017
+-0.484526 0.590397 0.358619 -0.536711
+-0.522102 0.557443 0.392954 -0.512107
+-0.557443 0.522102 0.425606 -0.485311
+-0.590397 0.484526 0.456435 -0.456435
+-0.620822 0.444875 0.48531 -0.425606
+-0.648589 0.403318 0.512107 -0.392954
+-0.673578 0.360035 0.536711 -0.358619
+-0.695684 0.31521 0.559017 -0.322749
+-0.71481 0.269035 0.578929 -0.285496
+-0.730875 0.221709 0.596362 -0.247021
+-0.743811 0.173432 0.611241 -0.207488
+-0.753561 0.124414 0.623502 -0.167067
+-0.760085 0.0748618 0.633094 -0.12593
+-0.763354 0.0249897 0.639975 -0.0842543
+0.790146 0.0258667 -0.612045 -0.0200363
+0.786763 0.0774894 -0.609424 -0.060023
+0.78001 0.12878 -0.604193 -0.0997527
+0.769917 0.17952 -0.596375 -0.139055
+0.756528 0.22949 -0.586004 -0.177762
+0.739899 0.278478 -0.573123 -0.215708
+0.720101 0.326274 -0.557788 -0.25273
+0.69722 0.372672 -0.540064 -0.28867
+0.671353 0.417474 -0.520028 -0.323374
+0.642612 0.460489 -0.497765 -0.356693
+0.611118 0.501532 -0.47337 -0.388485
+0.577008 0.540427 -0.446949 -0.418613
+0.540427 0.577008 -0.418613 -0.446949
+0.501532 0.611118 -0.388485 -0.47337
+0.460489 0.642612 -0.356693 -0.497765
+0.417474 0.671353 -0.323374 -0.520028
+0.372672 0.69722 -0.28867 -0.540064
+0.326274 0.720101 -0.25273 -0.557788
+0.278478 0.739899 -0.215708 -0.573123
+0.22949 0.756528 -0.177762 -0.586004
+0.17952 0.769917 -0.139055 -0.596375
+0.12878 0.78001 -0.0997526 -0.604193
+0.0774893 0.786763 -0.0600229 -0.609424
+0.0258666 0.790146 -0.0200362 -0.612045
+-0.0258668 0.790146 0.0200363 -0.612045
+-0.0774894 0.786763 0.060023 -0.609424
+-0.12878 0.78001 0.0997527 -0.604193
+-0.17952 0.769917 0.139055 -0.596375
+-0.22949 0.756528 0.177762 -0.586004
+-0.278478 0.739899 0.215708 -0.573123
+-0.326274 0.720101 0.25273 -0.557788
+-0.372672 0.69722 0.28867 -0.540064
+-0.417474 0.671353 0.323374 -0.520028
+-0.460489 0.642612 0.356693 -0.497765
+-0.501532 0.611118 0.388485 -0.47337
+-0.540427 0.577008 0.418613 -0.446949
+-0.577008 0.540427 0.446949 -0.418613
+-0.611118 0.501532 0.47337 -0.388485
+-0.642612 0.460489 0.497765 -0.356693
+-0.671353 0.417474 0.520028 -0.323374
+-0.69722 0.372672 0.540064 -0.28867
+-0.720101 0.326274 0.557788 -0.25273
+-0.739899 0.278478 0.573123 -0.215708
+-0.756528 0.22949 0.586004 -0.177762
+-0.769917 0.179519 0.596375 -0.139055
+-0.78001 0.12878 0.604193 -0.0997527
+-0.786763 0.0774893 0.609424 -0.060023
+-0.790146 0.0258668 0.612045 -0.0200363
+0.790146 0.0258667 -0.596375 -0.139055
+0.786763 0.0774894 -0.586004 -0.177762
+0.78001 0.12878 -0.573123 -0.215708
+0.769917 0.17952 -0.557788 -0.25273
+0.756528 0.22949 -0.540064 -0.28867
+0.739899 0.278478 -0.520028 -0.323374
+0.720101 0.326274 -0.497765 -0.356693
+0.69722 0.372672 -0.47337 -0.388485
+0.671353 0.417474 -0.446949 -0.418613
+0.642612 0.460489 -0.418613 -0.446949
+0.611118 0.501532 -0.388485 -0.47337
+0.577008 0.540427 -0.356693 -0.497765
+0.540427 0.577008 -0.323374 -0.520028
+0.501532 0.611118 -0.28867 -0.540064
+0.460489 0.642612 -0.25273 -0.557788
+0.417474 0.671353 -0.215708 -0.573123
+0.372672 0.69722 -0.177762 -0.586004
+0.326274 0.720101 -0.139055 -0.596375
+0.278478 0.739899 -0.0997527 -0.604193
+0.22949 0.756528 -0.060023 -0.609424
+0.17952 0.769917 -0.0200362 -0.612045
+0.12878 0.78001 0.0200363 -0.612045
+0.0774893 0.786763 0.0600231 -0.609424
+0.0258666 0.790146 0.0997528 -0.604193
+-0.0258668 0.790146 0.139055 -0.596375
+-0.0774894 0.786763 0.177762 -0.586004
+-0.12878 0.78001 0.215708 -0.573123
+-0.17952 0.769917 0.25273 -0.557788
+-0.22949 0.756528 0.28867 -0.540064
+-0.278478 0.739899 0.323374 -0.520028
+-0.326274 0.720101 0.356693 -0.497765
+-0.372672 0.69722 0.388485 -0.47337
+-0.417474 0.671353 0.418613 -0.446949
+-0.460489 0.642612 0.446949 -0.418613
+-0.501532 0.611118 0.47337 -0.388485
+-0.540427 0.577008 0.497765 -0.356693
+-0.577008 0.540427 0.520028 -0.323374
+-0.611118 0.501532 0.540064 -0.28867
+-0.642612 0.460489 0.557788 -0.25273
+-0.671353 0.417474 0.573123 -0.215708
+-0.69722 0.372672 0.586004 -0.177762
+-0.720101 0.326274 0.596375 -0.139055
+-0.739899 0.278478 0.604193 -0.0997526
+-0.756528 0.22949 0.609424 -0.060023
+-0.769917 0.179519 0.612045 -0.0200362
+-0.78001 0.12878 0.612045 0.0200362
+-0.786763 0.0774893 0.609424 0.060023
+-0.790146 0.0258668 0.604193 0.0997526
+0.816059 0.026715 -0.54671 -0.185583
+0.812565 0.0800307 -0.533402 -0.220942
+0.805591 0.133004 -0.51781 -0.255355
+0.795167 0.185407 -0.5 -0.288675
+0.781339 0.237016 -0.480049 -0.320759
+0.764164 0.287611 -0.458043 -0.351469
+0.743717 0.336974 -0.434075 -0.380673
+0.720086 0.384894 -0.408248 -0.408248
+0.693371 0.431166 -0.380673 -0.434075
+0.663687 0.475591 -0.351469 -0.458043
+0.63116 0.51798 -0.320759 -0.480049
+0.595932 0.558151 -0.288675 -0.5
+0.558151 0.595932 -0.255355 -0.51781
+0.51798 0.63116 -0.220942 -0.533402
+0.475591 0.663687 -0.185583 -0.54671
+0.431166 0.693371 -0.149429 -0.557678
+0.384894 0.720086 -0.112635 -0.566257
+0.336974 0.743717 -0.0753593 -0.572411
+0.287611 0.764164 -0.0377605 -0.576114
+0.237016 0.781339 2.48065e-08 -0.57735
+0.185407 0.795167 0.0377605 -0.576114
+0.133003 0.805591 0.0753594 -0.572411
+0.0800306 0.812565 0.112636 -0.566257
+0.0267149 0.816059 0.149429 -0.557678
+-0.0267151 0.816059 0.185583 -0.54671
+-0.0800307 0.812565 0.220942 -0.533402
+-0.133004 0.805591 0.255356 -0.51781
+-0.185407 0.795167 0.288675 -0.5
+-0.237017 0.781338 0.320759 -0.480049
+-0.287611 0.764164 0.351469 -0.458043
+-0.336974 0.743717 0.380674 -0.434075
+-0.384894 0.720086 0.408248 -0.408248
+-0.431166 0.693371 0.434075 -0.380673
+-0.475591 0.663686 0.458043 -0.351469
+-0.51798 0.63116 0.480049 -0.320759
+-0.558151 0.595931 0.5 -0.288675
+-0.595931 0.558151 0.51781 -0.255356
+-0.63116 0.51798 0.533402 -0.220942
+-0.663686 0.475591 0.54671 -0.185583
+-0.693371 0.431166 0.557678 -0.149429
+-0.720086 0.384894 0.566257 -0.112636
+-0.743717 0.336974 0.572411 -0.0753593
+-0.764164 0.287611 0.576114 -0.0377605
+-0.781339 0.237016 0.57735 -1.87823e-08
+-0.795167 0.185407 0.576114 0.0377606
+-0.805591 0.133004 0.572411 0.0753593
+-0.812565 0.0800306 0.566257 0.112635
+-0.816059 0.0267151 0.557678 0.149429
+0.816059 0.026715 -0.572411 -0.0753593
+0.812565 0.0800307 -0.566257 -0.112635
+0.805591 0.133004 -0.557678 -0.149429
+0.795167 0.185407 -0.54671 -0.185583
+0.781339 0.237016 -0.533402 -0.220942
+0.764164 0.287611 -0.51781 -0.255355
+0.743717 0.336974 -0.5 -0.288675
+0.720086 0.384894 -0.480049 -0.320759
+0.693371 0.431166 -0.458043 -0.351469
+0.663687 0.475591 -0.434075 -0.380673
+0.63116 0.51798 -0.408248 -0.408248
+0.595932 0.558151 -0.380673 -0.434075
+0.558151 0.595932 -0.351469 -0.458043
+0.51798 0.63116 -0.320759 -0.480049
+0.475591 0.663687 -0.288675 -0.5
+0.431166 0.693371 -0.255355 -0.51781
+0.384894 0.720086 -0.220942 -0.533402
+0.336974 0.743717 -0.185583 -0.54671
+0.287611 0.764164 -0.149429 -0.557678
+0.237016 0.781339 -0.112635 -0.566257
+0.185407 0.795167 -0.0753593 -0.572411
+0.133003 0.805591 -0.0377604 -0.576114
+0.0800306 0.812565 7.0977e-08 -0.57735
+0.0267149 0.816059 0.0377606 -0.576114
+-0.0267151 0.816059 0.0753594 -0.572411
+-0.0800307 0.812565 0.112635 -0.566257
+-0.133004 0.805591 0.149429 -0.557678
+-0.185407 0.795167 0.185583 -0.54671
+-0.237017 0.781338 0.220942 -0.533402
+-0.287611 0.764164 0.255356 -0.51781
+-0.336974 0.743717 0.288675 -0.5
+-0.384894 0.720086 0.320759 -0.480049
+-0.431166 0.693371 0.351469 -0.458043
+-0.475591 0.663686 0.380674 -0.434075
+-0.51798 0.63116 0.408248 -0.408248
+-0.558151 0.595931 0.434075 -0.380673
+-0.595931 0.558151 0.458043 -0.351469
+-0.63116 0.51798 0.480049 -0.320759
+-0.663686 0.475591 0.5 -0.288675
+-0.693371 0.431166 0.51781 -0.255355
+-0.720086 0.384894 0.533402 -0.220942
+-0.743717 0.336974 0.54671 -0.185583
+-0.764164 0.287611 0.557678 -0.149429
+-0.781339 0.237016 0.566257 -0.112635
+-0.795167 0.185407 0.572411 -0.0753593
+-0.805591 0.133004 0.576114 -0.0377605
+-0.812565 0.0800306 0.57735 2.73883e-08
+-0.816059 0.0267151 0.576114 0.0377605
+0.841175 0.0275372 -0.525954 -0.122635
+0.837573 0.0824937 -0.516807 -0.156772
+0.830384 0.137097 -0.505447 -0.190237
+0.81964 0.191113 -0.491923 -0.222887
+0.805385 0.244311 -0.476292 -0.254583
+0.787682 0.296463 -0.458622 -0.285189
+0.766606 0.347345 -0.438987 -0.314574
+0.742247 0.396739 -0.417473 -0.342612
+0.71471 0.444435 -0.394172 -0.369182
+0.684112 0.490228 -0.369182 -0.394172
+0.650585 0.533921 -0.342612 -0.417473
+0.614272 0.575329 -0.314574 -0.438987
+0.575329 0.614272 -0.285189 -0.458622
+0.533922 0.650585 -0.254583 -0.476292
+0.490228 0.684112 -0.222887 -0.491923
+0.444435 0.71471 -0.190237 -0.505447
+0.396739 0.742247 -0.156772 -0.516807
+0.347345 0.766606 -0.122635 -0.525954
+0.296463 0.787682 -0.0879736 -0.532848
+0.244311 0.805385 -0.0529353 -0.537461
+0.191113 0.81964 -0.0176703 -0.539773
+0.137097 0.830384 0.0176704 -0.539773
+0.0824936 0.837573 0.0529354 -0.537461
+0.0275371 0.841175 0.0879737 -0.532848
+-0.0275373 0.841175 0.122635 -0.525954
+-0.0824938 0.837573 0.156772 -0.516807
+-0.137097 0.830384 0.190237 -0.505447
+-0.191113 0.81964 0.222887 -0.491923
+-0.244311 0.805385 0.254583 -0.476292
+-0.296463 0.787682 0.285189 -0.458622
+-0.347345 0.766606 0.314574 -0.438987
+-0.396739 0.742247 0.342611 -0.417473
+-0.444435 0.71471 0.369182 -0.394172
+-0.490228 0.684112 0.394172 -0.369182
+-0.533921 0.650585 0.417473 -0.342612
+-0.575329 0.614272 0.438987 -0.314574
+-0.614272 0.575329 0.458622 -0.285189
+-0.650585 0.533921 0.476292 -0.254583
+-0.684112 0.490228 0.491923 -0.222887
+-0.71471 0.444435 0.505447 -0.190237
+-0.742247 0.39674 0.516807 -0.156772
+-0.766606 0.347345 0.525954 -0.122635
+-0.787682 0.296463 0.532848 -0.0879736
+-0.805385 0.244311 0.537461 -0.0529353
+-0.81964 0.191113 0.539773 -0.0176703
+-0.830384 0.137097 0.539773 0.0176703
+-0.837573 0.0824937 0.537461 0.0529353
+-0.841175 0.0275373 0.532848 0.0879736
+0.790146 0.0258667 -0.604193 0.0997527
+0.786763 0.0774894 -0.609424 0.060023
+0.78001 0.12878 -0.612045 0.0200363
+0.769917 0.17952 -0.612045 -0.0200363
+0.756528 0.22949 -0.609424 -0.060023
+0.739899 0.278478 -0.604193 -0.0997527
+0.720101 0.326274 -0.596375 -0.139055
+0.69722 0.372672 -0.586004 -0.177762
+0.671353 0.417474 -0.573123 -0.215708
+0.642612 0.460489 -0.557788 -0.25273
+0.611118 0.501532 -0.540064 -0.28867
+0.577008 0.540427 -0.520028 -0.323374
+0.540427 0.577008 -0.497765 -0.356693
+0.501532 0.611118 -0.47337 -0.388485
+0.460489 0.642612 -0.446949 -0.418613
+0.417474 0.671353 -0.418613 -0.446949
+0.372672 0.69722 -0.388485 -0.47337
+0.326274 0.720101 -0.356693 -0.497765
+0.278478 0.739899 -0.323374 -0.520028
+0.22949 0.756528 -0.28867 -0.540064
+0.17952 0.769917 -0.25273 -0.557788
+0.12878 0.78001 -0.215708 -0.573123
+0.0774893 0.786763 -0.177762 -0.586004
+0.0258666 0.790146 -0.139055 -0.596375
+-0.0258668 0.790146 -0.0997526 -0.604193
+-0.0774894 0.786763 -0.0600229 -0.609424
+-0.12878 0.78001 -0.0200362 -0.612045
+-0.17952 0.769917 0.0200363 -0.612045
+-0.22949 0.756528 0.0600231 -0.609424
+-0.278478 0.739899 0.0997528 -0.604193
+-0.326274 0.720101 0.139055 -0.596375
+-0.372672 0.69722 0.177762 -0.586004
+-0.417474 0.671353 0.215708 -0.573123
+-0.460489 0.642612 0.25273 -0.557788
+-0.501532 0.611118 0.28867 -0.540064
+-0.540427 0.577008 0.323374 -0.520028
+-0.577008 0.540427 0.356693 -0.497765
+-0.611118 0.501532 0.388485 -0.47337
+-0.642612 0.460489 0.418613 -0.446949
+-0.671353 0.417474 0.446949 -0.418613
+-0.69722 0.372672 0.47337 -0.388485
+-0.720101 0.326274 0.497765 -0.356693
+-0.739899 0.278478 0.520028 -0.323374
+-0.756528 0.22949 0.540064 -0.28867
+-0.769917 0.179519 0.557788 -0.25273
+-0.78001 0.12878 0.573123 -0.215708
+-0.786763 0.0774893 0.586004 -0.177762
+-0.790146 0.0258668 0.596375 -0.139055
+0.816059 0.026715 -0.576114 0.0377605
+0.812565 0.0800307 -0.57735 -1.5773e-09
+0.805591 0.133004 -0.576114 -0.0377605
+0.795167 0.185407 -0.572411 -0.0753593
+0.781339 0.237016 -0.566257 -0.112635
+0.764164 0.287611 -0.557678 -0.149429
+0.743717 0.336974 -0.54671 -0.185583
+0.720086 0.384894 -0.533402 -0.220942
+0.693371 0.431166 -0.51781 -0.255355
+0.663687 0.475591 -0.5 -0.288675
+0.63116 0.51798 -0.480049 -0.320759
+0.595932 0.558151 -0.458043 -0.351469
+0.558151 0.595932 -0.434075 -0.380673
+0.51798 0.63116 -0.408248 -0.408248
+0.475591 0.663687 -0.380673 -0.434075
+0.431166 0.693371 -0.351469 -0.458043
+0.384894 0.720086 -0.320759 -0.480049
+0.336974 0.743717 -0.288675 -0.5
+0.287611 0.764164 -0.255355 -0.51781
+0.237016 0.781339 -0.220942 -0.533402
+0.185407 0.795167 -0.185583 -0.54671
+0.133003 0.805591 -0.149429 -0.557678
+0.0800306 0.812565 -0.112635 -0.566257
+0.0267149 0.816059 -0.0753593 -0.572411
+-0.0267151 0.816059 -0.0377605 -0.576114
+-0.0800307 0.812565 4.83221e-08 -0.57735
+-0.133004 0.805591 0.0377606 -0.576114
+-0.185407 0.795167 0.0753594 -0.572411
+-0.237017 0.781338 0.112636 -0.566257
+-0.287611 0.764164 0.149429 -0.557678
+-0.336974 0.743717 0.185583 -0.54671
+-0.384894 0.720086 0.220942 -0.533402
+-0.431166 0.693371 0.255355 -0.51781
+-0.475591 0.663686 0.288675 -0.5
+-0.51798 0.63116 0.320759 -0.480049
+-0.558151 0.595931 0.351469 -0.458043
+-0.595931 0.558151 0.380673 -0.434075
+-0.63116 0.51798 0.408248 -0.408248
+-0.663686 0.475591 0.434075 -0.380674
+-0.693371 0.431166 0.458043 -0.351469
+-0.720086 0.384894 0.480049 -0.320759
+-0.743717 0.336974 0.5 -0.288675
+-0.764164 0.287611 0.51781 -0.255355
+-0.781339 0.237016 0.533402 -0.220942
+-0.795167 0.185407 0.54671 -0.185583
+-0.805591 0.133004 0.557678 -0.149429
+-0.812565 0.0800306 0.566257 -0.112635
+-0.816059 0.0267151 0.572411 -0.0753594
+0.816059 0.026715 -0.557678 0.149429
+0.812565 0.0800307 -0.566257 0.112635
+0.805591 0.133004 -0.572411 0.0753593
+0.795167 0.185407 -0.576114 0.0377605
+0.781339 0.237016 -0.57735 -4.30302e-10
+0.764164 0.287611 -0.576114 -0.0377605
+0.743717 0.336974 -0.572411 -0.0753593
+0.720086 0.384894 -0.566257 -0.112635
+0.693371 0.431166 -0.557678 -0.149429
+0.663687 0.475591 -0.54671 -0.185583
+0.63116 0.51798 -0.533402 -0.220942
+0.595932 0.558151 -0.51781 -0.255356
+0.558151 0.595932 -0.5 -0.288675
+0.51798 0.63116 -0.480049 -0.320759
+0.475591 0.663687 -0.458043 -0.351469
+0.431166 0.693371 -0.434075 -0.380674
+0.384894 0.720086 -0.408248 -0.408248
+0.336974 0.743717 -0.380673 -0.434075
+0.287611 0.764164 -0.351469 -0.458043
+0.237016 0.781339 -0.320759 -0.480049
+0.185407 0.795167 -0.288675 -0.5
+0.133003 0.805591 -0.255355 -0.51781
+0.0800306 0.812565 -0.220942 -0.533402
+0.0267149 0.816059 -0.185583 -0.54671
+-0.0267151 0.816059 -0.149429 -0.557678
+-0.0800307 0.812565 -0.112635 -0.566257
+-0.133004 0.805591 -0.0753593 -0.572411
+-0.185407 0.795167 -0.0377605 -0.576114
+-0.237017 0.781338 9.44926e-08 -0.57735
+-0.287611 0.764164 0.0377606 -0.576114
+-0.336974 0.743717 0.0753594 -0.572411
+-0.384894 0.720086 0.112635 -0.566257
+-0.431166 0.693371 0.149429 -0.557678
+-0.475591 0.663686 0.185583 -0.54671
+-0.51798 0.63116 0.220942 -0.533402
+-0.558151 0.595931 0.255356 -0.51781
+-0.595931 0.558151 0.288675 -0.5
+-0.63116 0.51798 0.320759 -0.480049
+-0.663686 0.475591 0.351469 -0.458043
+-0.693371 0.431166 0.380673 -0.434075
+-0.720086 0.384894 0.408248 -0.408248
+-0.743717 0.336974 0.434075 -0.380673
+-0.764164 0.287611 0.458043 -0.351469
+-0.781339 0.237016 0.480049 -0.320759
+-0.795167 0.185407 0.5 -0.288675
+-0.805591 0.133004 0.51781 -0.255356
+-0.812565 0.0800306 0.533402 -0.220942
+-0.816059 0.0267151 0.54671 -0.185583
+0.841175 0.0275372 -0.532848 0.0879736
+0.837573 0.0824937 -0.537461 0.0529353
+0.830384 0.137097 -0.539773 0.0176703
+0.81964 0.191113 -0.539773 -0.0176703
+0.805385 0.244311 -0.537461 -0.0529353
+0.787682 0.296463 -0.532848 -0.0879736
+0.766606 0.347345 -0.525954 -0.122635
+0.742247 0.396739 -0.516807 -0.156772
+0.71471 0.444435 -0.505447 -0.190237
+0.684112 0.490228 -0.491923 -0.222887
+0.650585 0.533921 -0.476292 -0.254583
+0.614272 0.575329 -0.458622 -0.285189
+0.575329 0.614272 -0.438987 -0.314574
+0.533922 0.650585 -0.417473 -0.342612
+0.490228 0.684112 -0.394172 -0.369182
+0.444435 0.71471 -0.369182 -0.394172
+0.396739 0.742247 -0.342611 -0.417473
+0.347345 0.766606 -0.314574 -0.438987
+0.296463 0.787682 -0.285189 -0.458622
+0.244311 0.805385 -0.254583 -0.476292
+0.191113 0.81964 -0.222887 -0.491923
+0.137097 0.830384 -0.190237 -0.505447
+0.0824936 0.837573 -0.156772 -0.516807
+0.0275371 0.841175 -0.122635 -0.525954
+-0.0275373 0.841175 -0.0879736 -0.532848
+-0.0824938 0.837573 -0.0529353 -0.537461
+-0.137097 0.830384 -0.0176703 -0.539773
+-0.191113 0.81964 0.0176704 -0.539773
+-0.244311 0.805385 0.0529354 -0.537461
+-0.296463 0.787682 0.0879737 -0.532848
+-0.347345 0.766606 0.122635 -0.525954
+-0.396739 0.742247 0.156772 -0.516807
+-0.444435 0.71471 0.190237 -0.505447
+-0.490228 0.684112 0.222887 -0.491923
+-0.533921 0.650585 0.254583 -0.476292
+-0.575329 0.614272 0.285189 -0.458622
+-0.614272 0.575329 0.314574 -0.438987
+-0.650585 0.533921 0.342612 -0.417473
+-0.684112 0.490228 0.369182 -0.394172
+-0.71471 0.444435 0.394172 -0.369182
+-0.742247 0.39674 0.417473 -0.342612
+-0.766606 0.347345 0.438987 -0.314574
+-0.787682 0.296463 0.458622 -0.285189
+-0.805385 0.244311 0.476292 -0.254583
+-0.81964 0.191113 0.491923 -0.222887
+-0.830384 0.137097 0.505447 -0.190237
+-0.837573 0.0824937 0.516807 -0.156772
+-0.841175 0.0275373 0.525954 -0.122635
+0.841175 0.0275372 -0.539773 -0.0176703
+0.837573 0.0824937 -0.537461 -0.0529353
+0.830384 0.137097 -0.532848 -0.0879736
+0.81964 0.191113 -0.525954 -0.122635
+0.805385 0.244311 -0.516807 -0.156772
+0.787682 0.296463 -0.505447 -0.190237
+0.766606 0.347345 -0.491923 -0.222887
+0.742247 0.396739 -0.476292 -0.254583
+0.71471 0.444435 -0.458622 -0.285189
+0.684112 0.490228 -0.438987 -0.314574
+0.650585 0.533921 -0.417473 -0.342612
+0.614272 0.575329 -0.394172 -0.369182
+0.575329 0.614272 -0.369182 -0.394172
+0.533922 0.650585 -0.342612 -0.417473
+0.490228 0.684112 -0.314574 -0.438987
+0.444435 0.71471 -0.285189 -0.458622
+0.396739 0.742247 -0.254583 -0.476292
+0.347345 0.766606 -0.222887 -0.491923
+0.296463 0.787682 -0.190237 -0.505447
+0.244311 0.805385 -0.156772 -0.516807
+0.191113 0.81964 -0.122635 -0.525954
+0.137097 0.830384 -0.0879735 -0.532848
+0.0824936 0.837573 -0.0529352 -0.537461
+0.0275371 0.841175 -0.0176703 -0.539773
+-0.0275373 0.841175 0.0176704 -0.539773
+-0.0824938 0.837573 0.0529354 -0.537461
+-0.137097 0.830384 0.0879736 -0.532848
+-0.191113 0.81964 0.122635 -0.525954
+-0.244311 0.805385 0.156772 -0.516807
+-0.296463 0.787682 0.190237 -0.505447
+-0.347345 0.766606 0.222887 -0.491923
+-0.396739 0.742247 0.254583 -0.476292
+-0.444435 0.71471 0.285189 -0.458622
+-0.490228 0.684112 0.314574 -0.438987
+-0.533921 0.650585 0.342612 -0.417473
+-0.575329 0.614272 0.369182 -0.394172
+-0.614272 0.575329 0.394172 -0.369182
+-0.650585 0.533921 0.417473 -0.342612
+-0.684112 0.490228 0.438987 -0.314574
+-0.71471 0.444435 0.458622 -0.285189
+-0.742247 0.39674 0.476292 -0.254583
+-0.766606 0.347345 0.491923 -0.222887
+-0.787682 0.296463 0.505447 -0.190237
+-0.805385 0.244311 0.516807 -0.156772
+-0.81964 0.191113 0.525954 -0.122635
+-0.830384 0.137097 0.532848 -0.0879736
+-0.837573 0.0824937 0.537461 -0.0529353
+-0.841175 0.0275373 0.539773 -0.0176704
+0.865562 0.0283356 -0.495722 -0.0652631
+0.861855 0.0848853 -0.490393 -0.0975452
+0.854458 0.141072 -0.482963 -0.12941
+0.843402 0.196654 -0.473465 -0.16072
+0.828735 0.251394 -0.46194 -0.191342
+0.810518 0.305057 -0.448436 -0.221144
+0.788831 0.357415 -0.433013 -0.25
+0.763766 0.408242 -0.415735 -0.277785
+0.735431 0.45732 -0.396677 -0.304381
+0.703946 0.50444 -0.37592 -0.329673
+0.669447 0.549401 -0.353553 -0.353553
+0.632081 0.592008 -0.329673 -0.37592
+0.592008 0.632081 -0.304381 -0.396677
+0.549401 0.669447 -0.277785 -0.415735
+0.50444 0.703946 -0.25 -0.433013
+0.45732 0.735431 -0.221144 -0.448436
+0.408241 0.763766 -0.191342 -0.46194
+0.357415 0.788831 -0.16072 -0.473465
+0.305057 0.810518 -0.129409 -0.482963
+0.251394 0.828735 -0.0975451 -0.490393
+0.196654 0.843402 -0.0652631 -0.495722
+0.141072 0.854458 -0.0327015 -0.498929
+0.0848852 0.861855 6.14679e-08 -0.5
+0.0283355 0.865562 0.0327016 -0.498929
+-0.0283356 0.865562 0.0652631 -0.495722
+-0.0848854 0.861855 0.0975452 -0.490393
+-0.141072 0.854458 0.12941 -0.482963
+-0.196654 0.843402 0.16072 -0.473465
+-0.251394 0.828735 0.191342 -0.46194
+-0.305058 0.810518 0.221144 -0.448436
+-0.357415 0.788831 0.25 -0.433013
+-0.408241 0.763766 0.277785 -0.415735
+-0.45732 0.735431 0.304381 -0.396677
+-0.504441 0.703946 0.329673 -0.37592
+-0.549401 0.669447 0.353553 -0.353553
+-0.592008 0.632081 0.37592 -0.329673
+-0.632081 0.592008 0.396677 -0.304381
+-0.669447 0.549401 0.415735 -0.277785
+-0.703946 0.504441 0.433013 -0.25
+-0.735431 0.45732 0.448436 -0.221144
+-0.763766 0.408242 0.46194 -0.191342
+-0.788831 0.357415 0.473465 -0.16072
+-0.810518 0.305057 0.482963 -0.129409
+-0.828735 0.251394 0.490393 -0.0975452
+-0.843402 0.196654 0.495722 -0.0652631
+-0.854458 0.141072 0.498929 -0.0327016
+-0.861855 0.0848853 0.5 2.3719e-08
+-0.865562 0.0283356 0.498929 0.0327015
+0.865562 0.0283356 -0.498929 0.0327016
+0.861855 0.0848853 -0.5 -1.36598e-09
+0.854458 0.141072 -0.498929 -0.0327016
+0.843402 0.196654 -0.495722 -0.0652631
+0.828735 0.251394 -0.490393 -0.0975452
+0.810518 0.305057 -0.482963 -0.12941
+0.788831 0.357415 -0.473465 -0.16072
+0.763766 0.408242 -0.46194 -0.191342
+0.735431 0.45732 -0.448436 -0.221144
+0.703946 0.50444 -0.433013 -0.25
+0.669447 0.549401 -0.415735 -0.277785
+0.632081 0.592008 -0.396677 -0.304381
+0.592008 0.632081 -0.37592 -0.329673
+0.549401 0.669447 -0.353553 -0.353553
+0.50444 0.703946 -0.329673 -0.37592
+0.45732 0.735431 -0.304381 -0.396677
+0.408241 0.763766 -0.277785 -0.415735
+0.357415 0.788831 -0.25 -0.433013
+0.305057 0.810518 -0.221144 -0.448436
+0.251394 0.828735 -0.191342 -0.46194
+0.196654 0.843402 -0.16072 -0.473465
+0.141072 0.854458 -0.129409 -0.482963
+0.0848852 0.861855 -0.0975451 -0.490393
+0.0283355 0.865562 -0.065263 -0.495722
+-0.0283356 0.865562 -0.0327015 -0.498929
+-0.0848854 0.861855 4.18481e-08 -0.5
+-0.141072 0.854458 0.0327016 -0.498929
+-0.196654 0.843402 0.0652631 -0.495722
+-0.251394 0.828735 0.0975452 -0.490393
+-0.305058 0.810518 0.12941 -0.482963
+-0.357415 0.788831 0.16072 -0.473465
+-0.408241 0.763766 0.191342 -0.46194
+-0.45732 0.735431 0.221144 -0.448436
+-0.504441 0.703946 0.25 -0.433013
+-0.549401 0.669447 0.277785 -0.415735
+-0.592008 0.632081 0.304381 -0.396677
+-0.632081 0.592008 0.329673 -0.37592
+-0.669447 0.549401 0.353553 -0.353553
+-0.703946 0.504441 0.37592 -0.329673
+-0.735431 0.45732 0.396677 -0.304381
+-0.763766 0.408242 0.415735 -0.277785
+-0.788831 0.357415 0.433013 -0.25
+-0.810518 0.305057 0.448436 -0.221144
+-0.828735 0.251394 0.46194 -0.191342
+-0.843402 0.196654 0.473465 -0.16072
+-0.854458 0.141072 0.482963 -0.12941
+-0.861855 0.0848853 0.490393 -0.0975451
+-0.865562 0.0283356 0.495722 -0.0652631
+0.88928 0.029112 -0.456191 -0.0149342
+0.885472 0.0872114 -0.454238 -0.0447385
+0.877872 0.144937 -0.450339 -0.0743513
+0.866513 0.202043 -0.444512 -0.103646
+0.851444 0.258283 -0.436782 -0.132496
+0.832728 0.313417 -0.427181 -0.160779
+0.810447 0.367209 -0.415751 -0.188374
+0.784695 0.419428 -0.40254 -0.215162
+0.755583 0.469852 -0.387606 -0.241029
+0.723236 0.518263 -0.371012 -0.265863
+0.687791 0.564456 -0.352829 -0.28956
+0.649401 0.608231 -0.333136 -0.312016
+0.608231 0.649401 -0.312016 -0.333136
+0.564456 0.687791 -0.28956 -0.352829
+0.518263 0.723236 -0.265863 -0.371012
+0.469852 0.755583 -0.241029 -0.387606
+0.419428 0.784695 -0.215162 -0.40254
+0.367209 0.810447 -0.188374 -0.415751
+0.313417 0.832728 -0.160779 -0.427181
+0.258283 0.851444 -0.132496 -0.436782
+0.202043 0.866513 -0.103646 -0.444512
+0.144937 0.877872 -0.0743512 -0.450339
+0.0872113 0.885472 -0.0447384 -0.454238
+0.0291119 0.88928 -0.0149341 -0.456191
+-0.0291121 0.88928 0.0149342 -0.456191
+-0.0872115 0.885472 0.0447385 -0.454238
+-0.144937 0.877872 0.0743513 -0.450339
+-0.202043 0.866513 0.103646 -0.444512
+-0.258283 0.851444 0.132496 -0.436781
+-0.313417 0.832728 0.160779 -0.427181
+-0.367209 0.810447 0.188374 -0.415751
+-0.419428 0.784695 0.215162 -0.40254
+-0.469852 0.755583 0.241029 -0.387606
+-0.518263 0.723236 0.265864 -0.371012
+-0.564456 0.687791 0.28956 -0.352829
+-0.608231 0.649401 0.312016 -0.333136
+-0.649401 0.608231 0.333136 -0.312016
+-0.687791 0.564456 0.352829 -0.28956
+-0.723236 0.518263 0.371012 -0.265864
+-0.755583 0.469852 0.387606 -0.241029
+-0.784695 0.419428 0.40254 -0.215162
+-0.810447 0.367209 0.415751 -0.188374
+-0.832728 0.313417 0.427181 -0.160779
+-0.851444 0.258283 0.436782 -0.132496
+-0.866513 0.202043 0.444512 -0.103646
+-0.877872 0.144937 0.450339 -0.0743513
+-0.885472 0.0872113 0.454238 -0.0447385
+-0.88928 0.0291121 0.456191 -0.0149342
+0.456191 0.0149342 0.029112 -0.88928
+0.454238 0.0447385 0.0872114 -0.885472
+0.450339 0.0743513 0.144937 -0.877872
+0.444512 0.103646 0.202043 -0.866513
+0.436782 0.132496 0.258283 -0.851444
+0.427181 0.160779 0.313417 -0.832728
+0.415751 0.188374 0.367209 -0.810447
+0.40254 0.215162 0.419428 -0.784695
+0.387606 0.241029 0.469852 -0.755583
+0.371012 0.265863 0.518263 -0.723236
+0.352829 0.28956 0.564456 -0.687791
+0.333136 0.312016 0.608231 -0.649401
+0.312016 0.333136 0.649401 -0.608231
+0.28956 0.352829 0.687791 -0.564456
+0.265863 0.371012 0.723236 -0.518263
+0.241029 0.387606 0.755583 -0.469852
+0.215162 0.40254 0.784695 -0.419428
+0.188374 0.415751 0.810447 -0.367209
+0.160779 0.427181 0.832728 -0.313417
+0.132496 0.436782 0.851444 -0.258283
+0.103646 0.444512 0.866513 -0.202043
+0.0743512 0.450339 0.877872 -0.144937
+0.0447384 0.454238 0.885472 -0.0872113
+0.0149341 0.456191 0.88928 -0.0291119
+-0.0149342 0.456191 0.88928 0.0291121
+-0.0447385 0.454238 0.885472 0.0872115
+-0.0743513 0.450339 0.877872 0.144937
+-0.103646 0.444512 0.866513 0.202043
+-0.132496 0.436781 0.851444 0.258283
+-0.160779 0.427181 0.832728 0.313417
+-0.188374 0.415751 0.810447 0.367209
+-0.215162 0.40254 0.784695 0.419428
+-0.241029 0.387606 0.755583 0.469852
+-0.265864 0.371012 0.723236 0.518263
+-0.28956 0.352829 0.687791 0.564456
+-0.312016 0.333136 0.649401 0.608231
+-0.333136 0.312016 0.608231 0.649401
+-0.352829 0.28956 0.564456 0.687791
+-0.371012 0.265864 0.518263 0.723236
+-0.387606 0.241029 0.469852 0.755583
+-0.40254 0.215162 0.419428 0.784695
+-0.415751 0.188374 0.367209 0.810447
+-0.427181 0.160779 0.313417 0.832728
+-0.436782 0.132496 0.258283 0.851444
+-0.444512 0.103646 0.202043 0.866513
+-0.450339 0.0743513 0.144937 0.877872
+-0.454238 0.0447385 0.0872113 0.885472
+-0.456191 0.0149342 0.0291121 0.88928
+0.499732 0.0163595 0.113039 -0.858616
+0.497592 0.0490086 0.168953 -0.849385
+0.493322 0.0814477 0.224144 -0.836516
+0.486938 0.113538 0.278375 -0.820066
+0.47847 0.145142 0.331414 -0.800103
+0.467953 0.176125 0.383033 -0.776715
+0.455432 0.206354 0.433013 -0.75
+0.440961 0.235698 0.481138 -0.720074
+0.424601 0.264034 0.527203 -0.687064
+0.406423 0.291239 0.57101 -0.651112
+0.386505 0.317197 0.612372 -0.612372
+0.364932 0.341796 0.651112 -0.57101
+0.341796 0.364932 0.687064 -0.527203
+0.317197 0.386505 0.720074 -0.481138
+0.291239 0.406423 0.75 -0.433013
+0.264034 0.424601 0.776715 -0.383033
+0.235698 0.440961 0.800103 -0.331414
+0.206353 0.455432 0.820066 -0.278375
+0.176125 0.467953 0.836516 -0.224144
+0.145142 0.47847 0.849385 -0.168953
+0.113538 0.486938 0.858616 -0.113039
+0.0814477 0.493322 0.864171 -0.0566407
+0.0490085 0.497592 0.866025 1.06466e-07
+0.0163595 0.499732 0.864171 0.0566409
+-0.0163596 0.499732 0.858616 0.113039
+-0.0490086 0.497592 0.849385 0.168953
+-0.0814478 0.493322 0.836516 0.224144
+-0.113538 0.486938 0.820066 0.278375
+-0.145142 0.47847 0.800103 0.331414
+-0.176125 0.467953 0.776715 0.383033
+-0.206354 0.455432 0.75 0.433013
+-0.235698 0.440961 0.720074 0.481138
+-0.264034 0.424601 0.687064 0.527203
+-0.291239 0.406423 0.651112 0.57101
+-0.317197 0.386505 0.612372 0.612372
+-0.341796 0.364932 0.57101 0.651112
+-0.364932 0.341796 0.527203 0.687064
+-0.386505 0.317197 0.481138 0.720074
+-0.406423 0.291239 0.433013 0.75
+-0.424601 0.264034 0.383033 0.776715
+-0.440961 0.235698 0.331414 0.800103
+-0.455432 0.206354 0.278375 0.820066
+-0.467953 0.176125 0.224144 0.836516
+-0.47847 0.145142 0.168953 0.849385
+-0.486938 0.113538 0.113039 0.858616
+-0.493322 0.0814478 0.0566408 0.864171
+-0.497592 0.0490085 -4.10824e-08 0.866025
+-0.499732 0.0163596 -0.0566407 0.864171
+0.499732 0.0163595 -0.0566408 -0.864171
+0.497592 0.0490086 2.36595e-09 -0.866025
+0.493322 0.0814477 0.0566408 -0.864171
+0.486938 0.113538 0.113039 -0.858616
+0.47847 0.145142 0.168953 -0.849385
+0.467953 0.176125 0.224144 -0.836516
+0.455432 0.206354 0.278375 -0.820066
+0.440961 0.235698 0.331414 -0.800103
+0.424601 0.264034 0.383033 -0.776715
+0.406423 0.291239 0.433013 -0.75
+0.386505 0.317197 0.481138 -0.720074
+0.364932 0.341796 0.527203 -0.687064
+0.341796 0.364932 0.57101 -0.651112
+0.317197 0.386505 0.612372 -0.612372
+0.291239 0.406423 0.651112 -0.57101
+0.264034 0.424601 0.687064 -0.527203
+0.235698 0.440961 0.720074 -0.481138
+0.206353 0.455432 0.75 -0.433013
+0.176125 0.467953 0.776715 -0.383033
+0.145142 0.47847 0.800103 -0.331414
+0.113538 0.486938 0.820066 -0.278375
+0.0814477 0.493322 0.836516 -0.224144
+0.0490085 0.497592 0.849385 -0.168953
+0.0163595 0.499732 0.858616 -0.113039
+-0.0163596 0.499732 0.864171 -0.0566407
+-0.0490086 0.497592 0.866025 7.24831e-08
+-0.0814478 0.493322 0.864171 0.0566408
+-0.113538 0.486938 0.858616 0.113039
+-0.145142 0.47847 0.849385 0.168953
+-0.176125 0.467953 0.836516 0.224144
+-0.206354 0.455432 0.820066 0.278375
+-0.235698 0.440961 0.800103 0.331413
+-0.264034 0.424601 0.776715 0.383033
+-0.291239 0.406423 0.75 0.433013
+-0.317197 0.386505 0.720074 0.481138
+-0.341796 0.364932 0.687064 0.527203
+-0.364932 0.341796 0.651112 0.57101
+-0.386505 0.317197 0.612372 0.612372
+-0.406423 0.291239 0.57101 0.651112
+-0.424601 0.264034 0.527203 0.687064
+-0.440961 0.235698 0.481138 0.720074
+-0.455432 0.206354 0.433013 0.75
+-0.467953 0.176125 0.383033 0.776715
+-0.47847 0.145142 0.331414 0.800103
+-0.486938 0.113538 0.278375 0.820066
+-0.493322 0.0814478 0.224144 0.836516
+-0.497592 0.0490085 0.168953 0.849385
+-0.499732 0.0163596 0.113039 0.858616
+0.539773 0.0176703 0.0275372 -0.841175
+0.537461 0.0529353 0.0824937 -0.837573
+0.532848 0.0879736 0.137097 -0.830384
+0.525954 0.122635 0.191113 -0.81964
+0.516807 0.156772 0.244311 -0.805385
+0.505447 0.190237 0.296463 -0.787682
+0.491923 0.222887 0.347345 -0.766606
+0.476292 0.254583 0.396739 -0.742247
+0.458622 0.285189 0.444435 -0.71471
+0.438987 0.314574 0.490228 -0.684112
+0.417473 0.342612 0.533921 -0.650585
+0.394172 0.369182 0.575329 -0.614272
+0.369182 0.394172 0.614272 -0.575329
+0.342612 0.417473 0.650585 -0.533922
+0.314574 0.438987 0.684112 -0.490228
+0.285189 0.458622 0.71471 -0.444435
+0.254583 0.476292 0.742247 -0.396739
+0.222887 0.491923 0.766606 -0.347345
+0.190237 0.505447 0.787682 -0.296463
+0.156772 0.516807 0.805385 -0.244311
+0.122635 0.525954 0.81964 -0.191113
+0.0879735 0.532848 0.830384 -0.137097
+0.0529352 0.537461 0.837573 -0.0824936
+0.0176703 0.539773 0.841175 -0.0275371
+-0.0176704 0.539773 0.841175 0.0275373
+-0.0529354 0.537461 0.837573 0.0824938
+-0.0879736 0.532848 0.830384 0.137097
+-0.122635 0.525954 0.81964 0.191113
+-0.156772 0.516807 0.805385 0.244311
+-0.190237 0.505447 0.787682 0.296463
+-0.222887 0.491923 0.766606 0.347345
+-0.254583 0.476292 0.742247 0.396739
+-0.285189 0.458622 0.71471 0.444435
+-0.314574 0.438987 0.684112 0.490228
+-0.342612 0.417473 0.650585 0.533921
+-0.369182 0.394172 0.614272 0.575329
+-0.394172 0.369182 0.575329 0.614272
+-0.417473 0.342612 0.533921 0.650585
+-0.438987 0.314574 0.490228 0.684112
+-0.458622 0.285189 0.444435 0.71471
+-0.476292 0.254583 0.39674 0.742247
+-0.491923 0.222887 0.347345 0.766606
+-0.505447 0.190237 0.296463 0.787682
+-0.516807 0.156772 0.244311 0.805385
+-0.525954 0.122635 0.191113 0.81964
+-0.532848 0.0879736 0.137097 0.830384
+-0.537461 0.0529353 0.0824937 0.837573
+-0.539773 0.0176704 0.0275373 0.841175
+0.539773 0.0176703 0.191113 -0.81964
+0.537461 0.0529353 0.244311 -0.805385
+0.532848 0.0879736 0.296463 -0.787682
+0.525954 0.122635 0.347345 -0.766606
+0.516807 0.156772 0.396739 -0.742247
+0.505447 0.190237 0.444435 -0.71471
+0.491923 0.222887 0.490228 -0.684112
+0.476292 0.254583 0.533922 -0.650585
+0.458622 0.285189 0.575329 -0.614272
+0.438987 0.314574 0.614272 -0.575329
+0.417473 0.342612 0.650585 -0.533922
+0.394172 0.369182 0.684112 -0.490228
+0.369182 0.394172 0.71471 -0.444435
+0.342612 0.417473 0.742247 -0.396739
+0.314574 0.438987 0.766606 -0.347345
+0.285189 0.458622 0.787682 -0.296463
+0.254583 0.476292 0.805385 -0.244311
+0.222887 0.491923 0.81964 -0.191113
+0.190237 0.505447 0.830384 -0.137097
+0.156772 0.516807 0.837573 -0.0824937
+0.122635 0.525954 0.841175 -0.0275372
+0.0879735 0.532848 0.841175 0.0275373
+0.0529352 0.537461 0.837573 0.0824938
+0.0176703 0.539773 0.830384 0.137097
+-0.0176704 0.539773 0.81964 0.191113
+-0.0529354 0.537461 0.805385 0.244311
+-0.0879736 0.532848 0.787682 0.296463
+-0.122635 0.525954 0.766606 0.347345
+-0.156772 0.516807 0.742247 0.39674
+-0.190237 0.505447 0.71471 0.444435
+-0.222887 0.491923 0.684112 0.490228
+-0.254583 0.476292 0.650585 0.533921
+-0.285189 0.458622 0.614272 0.575329
+-0.314574 0.438987 0.575329 0.614272
+-0.342612 0.417473 0.533922 0.650585
+-0.369182 0.394172 0.490228 0.684112
+-0.394172 0.369182 0.444435 0.71471
+-0.417473 0.342612 0.396739 0.742247
+-0.438987 0.314574 0.347345 0.766606
+-0.458622 0.285189 0.296463 0.787682
+-0.476292 0.254583 0.244311 0.805385
+-0.491923 0.222887 0.191113 0.81964
+-0.505447 0.190237 0.137097 0.830384
+-0.516807 0.156772 0.0824937 0.837573
+-0.525954 0.122635 0.0275371 0.841175
+-0.532848 0.0879736 -0.0275372 0.841175
+-0.537461 0.0529353 -0.0824938 0.837573
+-0.539773 0.0176704 -0.137097 0.830384
+0.577041 0.0188904 0.262454 -0.773165
+0.57457 0.0565902 0.31246 -0.754344
+0.569639 0.0940477 0.361127 -0.732294
+0.562268 0.131103 0.408248 -0.707107
+0.55249 0.167596 0.453621 -0.678892
+0.540346 0.203372 0.497052 -0.64777
+0.525887 0.238277 0.538354 -0.613875
+0.509177 0.272161 0.57735 -0.57735
+0.490287 0.30488 0.613875 -0.538354
+0.469297 0.336294 0.64777 -0.497052
+0.446298 0.366267 0.678892 -0.453621
+0.421387 0.394672 0.707107 -0.408248
+0.394672 0.421387 0.732294 -0.361127
+0.366267 0.446298 0.754344 -0.31246
+0.336294 0.469297 0.773165 -0.262454
+0.30488 0.490287 0.788675 -0.211325
+0.272161 0.509178 0.800808 -0.159291
+0.238276 0.525887 0.809511 -0.106574
+0.203372 0.540346 0.814748 -0.0534014
+0.167596 0.55249 0.816497 3.50817e-08
+0.131103 0.562268 0.814748 0.0534015
+0.0940477 0.569639 0.809511 0.106574
+0.0565902 0.57457 0.800808 0.159291
+0.0188903 0.577041 0.788675 0.211325
+-0.0188904 0.577041 0.773165 0.262454
+-0.0565903 0.57457 0.754344 0.31246
+-0.0940478 0.569639 0.732294 0.361127
+-0.131103 0.562268 0.707107 0.408248
+-0.167596 0.55249 0.678892 0.453621
+-0.203372 0.540346 0.64777 0.497052
+-0.238277 0.525887 0.613875 0.538354
+-0.272161 0.509178 0.57735 0.57735
+-0.30488 0.490287 0.538354 0.613875
+-0.336294 0.469297 0.497052 0.64777
+-0.366267 0.446298 0.453621 0.678892
+-0.394672 0.421387 0.408248 0.707107
+-0.421387 0.394672 0.361127 0.732294
+-0.446298 0.366267 0.31246 0.754344
+-0.469297 0.336294 0.262454 0.773165
+-0.490287 0.30488 0.211325 0.788675
+-0.509177 0.272161 0.159291 0.800808
+-0.525887 0.238277 0.106574 0.809511
+-0.540346 0.203372 0.0534014 0.814748
+-0.55249 0.167596 2.65621e-08 0.816497
+-0.562268 0.131103 -0.0534015 0.814748
+-0.569639 0.0940478 -0.106574 0.809511
+-0.57457 0.0565902 -0.159291 0.800808
+-0.577041 0.0188904 -0.211325 0.788675
+0.577041 0.0188904 0.106574 -0.809511
+0.57457 0.0565902 0.159291 -0.800808
+0.569639 0.0940477 0.211325 -0.788675
+0.562268 0.131103 0.262454 -0.773165
+0.55249 0.167596 0.31246 -0.754344
+0.540346 0.203372 0.361127 -0.732294
+0.525887 0.238277 0.408248 -0.707107
+0.509177 0.272161 0.453621 -0.678892
+0.490287 0.30488 0.497052 -0.64777
+0.469297 0.336294 0.538354 -0.613875
+0.446298 0.366267 0.57735 -0.57735
+0.421387 0.394672 0.613875 -0.538354
+0.394672 0.421387 0.64777 -0.497052
+0.366267 0.446298 0.678892 -0.453621
+0.336294 0.469297 0.707107 -0.408248
+0.30488 0.490287 0.732294 -0.361127
+0.272161 0.509178 0.754345 -0.31246
+0.238276 0.525887 0.773165 -0.262454
+0.203372 0.540346 0.788675 -0.211325
+0.167596 0.55249 0.800808 -0.159291
+0.131103 0.562268 0.809511 -0.106574
+0.0940477 0.569639 0.814748 -0.0534013
+0.0565902 0.57457 0.816497 1.00377e-07
+0.0188903 0.577041 0.814748 0.0534015
+-0.0188904 0.577041 0.809511 0.106574
+-0.0565903 0.57457 0.800808 0.159291
+-0.0940478 0.569639 0.788675 0.211325
+-0.131103 0.562268 0.773165 0.262454
+-0.167596 0.55249 0.754344 0.31246
+-0.203372 0.540346 0.732293 0.361127
+-0.238277 0.525887 0.707107 0.408248
+-0.272161 0.509178 0.678892 0.453621
+-0.30488 0.490287 0.64777 0.497052
+-0.336294 0.469297 0.613875 0.538354
+-0.366267 0.446298 0.57735 0.57735
+-0.394672 0.421387 0.538354 0.613875
+-0.421387 0.394672 0.497052 0.64777
+-0.446298 0.366267 0.453621 0.678892
+-0.469297 0.336294 0.408248 0.707107
+-0.490287 0.30488 0.361127 0.732294
+-0.509177 0.272161 0.31246 0.754344
+-0.525887 0.238277 0.262454 0.773165
+-0.540346 0.203372 0.211325 0.788675
+-0.55249 0.167596 0.159291 0.800808
+-0.562268 0.131103 0.106574 0.809511
+-0.569639 0.0940478 0.0534015 0.814748
+-0.57457 0.0565902 -3.87329e-08 0.816497
+-0.577041 0.0188904 -0.0534014 0.814748
+0.612045 0.0200363 0.17952 -0.769917
+0.609424 0.060023 0.22949 -0.756528
+0.604193 0.0997527 0.278478 -0.739899
+0.596375 0.139055 0.326274 -0.720101
+0.586004 0.177762 0.372672 -0.69722
+0.573123 0.215708 0.417474 -0.671353
+0.557788 0.25273 0.460489 -0.642612
+0.540064 0.28867 0.501532 -0.611118
+0.520028 0.323374 0.540427 -0.577008
+0.497765 0.356693 0.577008 -0.540427
+0.47337 0.388485 0.611118 -0.501532
+0.446949 0.418613 0.642612 -0.460489
+0.418613 0.446949 0.671353 -0.417474
+0.388485 0.47337 0.69722 -0.372672
+0.356693 0.497765 0.720101 -0.326274
+0.323374 0.520028 0.739899 -0.278478
+0.28867 0.540064 0.756528 -0.22949
+0.25273 0.557788 0.769917 -0.179519
+0.215708 0.573123 0.78001 -0.12878
+0.177762 0.586004 0.786763 -0.0774893
+0.139055 0.596375 0.790146 -0.0258667
+0.0997526 0.604193 0.790146 0.0258668
+0.0600229 0.609424 0.786763 0.0774895
+0.0200362 0.612045 0.78001 0.12878
+-0.0200363 0.612045 0.769917 0.17952
+-0.060023 0.609424 0.756528 0.22949
+-0.0997527 0.604193 0.739899 0.278478
+-0.139055 0.596375 0.720101 0.326274
+-0.177762 0.586004 0.69722 0.372672
+-0.215708 0.573123 0.671353 0.417474
+-0.25273 0.557788 0.642612 0.460489
+-0.28867 0.540064 0.611118 0.501532
+-0.323374 0.520028 0.577008 0.540427
+-0.356693 0.497765 0.540427 0.577008
+-0.388485 0.47337 0.501532 0.611118
+-0.418613 0.446949 0.460489 0.642612
+-0.446949 0.418613 0.417474 0.671353
+-0.47337 0.388485 0.372672 0.69722
+-0.497765 0.356693 0.326274 0.720101
+-0.520028 0.323374 0.278478 0.739899
+-0.540064 0.28867 0.22949 0.756528
+-0.557788 0.25273 0.17952 0.769917
+-0.573123 0.215708 0.12878 0.78001
+-0.586004 0.177762 0.0774894 0.786763
+-0.596375 0.139055 0.0258666 0.790146
+-0.604193 0.0997527 -0.0258667 0.790146
+-0.609424 0.060023 -0.0774894 0.786763
+-0.612045 0.0200363 -0.12878 0.78001
+0.539773 0.0176703 -0.137097 -0.830384
+0.537461 0.0529353 -0.0824937 -0.837573
+0.532848 0.0879736 -0.0275372 -0.841175
+0.525954 0.122635 0.0275372 -0.841175
+0.516807 0.156772 0.0824937 -0.837573
+0.505447 0.190237 0.137097 -0.830384
+0.491923 0.222887 0.191113 -0.81964
+0.476292 0.254583 0.244311 -0.805385
+0.458622 0.285189 0.296463 -0.787682
+0.438987 0.314574 0.347345 -0.766606
+0.417473 0.342612 0.396739 -0.742247
+0.394172 0.369182 0.444435 -0.71471
+0.369182 0.394172 0.490228 -0.684112
+0.342612 0.417473 0.533922 -0.650585
+0.314574 0.438987 0.575329 -0.614272
+0.285189 0.458622 0.614272 -0.575329
+0.254583 0.476292 0.650585 -0.533921
+0.222887 0.491923 0.684112 -0.490228
+0.190237 0.505447 0.71471 -0.444435
+0.156772 0.516807 0.742247 -0.396739
+0.122635 0.525954 0.766606 -0.347345
+0.0879735 0.532848 0.787682 -0.296462
+0.0529352 0.537461 0.805385 -0.244311
+0.0176703 0.539773 0.81964 -0.191113
+-0.0176704 0.539773 0.830384 -0.137097
+-0.0529354 0.537461 0.837573 -0.0824936
+-0.0879736 0.532848 0.841175 -0.0275372
+-0.122635 0.525954 0.841175 0.0275373
+-0.156772 0.516807 0.837573 0.0824939
+-0.190237 0.505447 0.830384 0.137097
+-0.222887 0.491923 0.81964 0.191113
+-0.254583 0.476292 0.805385 0.244311
+-0.285189 0.458622 0.787682 0.296463
+-0.314574 0.438987 0.766606 0.347345
+-0.342612 0.417473 0.742247 0.396739
+-0.369182 0.394172 0.71471 0.444435
+-0.394172 0.369182 0.684112 0.490228
+-0.417473 0.342612 0.650585 0.533922
+-0.438987 0.314574 0.614272 0.575329
+-0.458622 0.285189 0.575329 0.614272
+-0.476292 0.254583 0.533922 0.650585
+-0.491923 0.222887 0.490228 0.684112
+-0.505447 0.190237 0.444435 0.71471
+-0.516807 0.156772 0.396739 0.742247
+-0.525954 0.122635 0.347345 0.766606
+-0.532848 0.0879736 0.296463 0.787682
+-0.537461 0.0529353 0.244311 0.805385
+-0.539773 0.0176704 0.191113 0.81964
+0.577041 0.0188904 -0.0534014 -0.814748
+0.57457 0.0565902 2.23064e-09 -0.816497
+0.569639 0.0940477 0.0534014 -0.814748
+0.562268 0.131103 0.106574 -0.809511
+0.55249 0.167596 0.159291 -0.800808
+0.540346 0.203372 0.211325 -0.788675
+0.525887 0.238277 0.262454 -0.773165
+0.509177 0.272161 0.31246 -0.754344
+0.490287 0.30488 0.361127 -0.732294
+0.469297 0.336294 0.408248 -0.707107
+0.446298 0.366267 0.453621 -0.678892
+0.421387 0.394672 0.497052 -0.64777
+0.394672 0.421387 0.538354 -0.613875
+0.366267 0.446298 0.57735 -0.57735
+0.336294 0.469297 0.613875 -0.538354
+0.30488 0.490287 0.64777 -0.497052
+0.272161 0.509178 0.678892 -0.453621
+0.238276 0.525887 0.707107 -0.408248
+0.203372 0.540346 0.732294 -0.361127
+0.167596 0.55249 0.754344 -0.31246
+0.131103 0.562268 0.773165 -0.262454
+0.0940477 0.569639 0.788675 -0.211325
+0.0565902 0.57457 0.800808 -0.15929
+0.0188903 0.577041 0.809511 -0.106574
+-0.0188904 0.577041 0.814748 -0.0534014
+-0.0565903 0.57457 0.816497 6.83377e-08
+-0.0940478 0.569639 0.814748 0.0534015
+-0.131103 0.562268 0.809511 0.106574
+-0.167596 0.55249 0.800808 0.159291
+-0.203372 0.540346 0.788675 0.211325
+-0.238277 0.525887 0.773165 0.262454
+-0.272161 0.509178 0.754345 0.31246
+-0.30488 0.490287 0.732294 0.361127
+-0.336294 0.469297 0.707107 0.408248
+-0.366267 0.446298 0.678892 0.453621
+-0.394672 0.421387 0.64777 0.497052
+-0.421387 0.394672 0.613875 0.538354
+-0.446298 0.366267 0.57735 0.57735
+-0.469297 0.336294 0.538354 0.613875
+-0.490287 0.30488 0.497052 0.64777
+-0.509177 0.272161 0.453621 0.678892
+-0.525887 0.238277 0.408248 0.707107
+-0.540346 0.203372 0.361127 0.732294
+-0.55249 0.167596 0.31246 0.754344
+-0.562268 0.131103 0.262454 0.773165
+-0.569639 0.0940478 0.211325 0.788675
+-0.57457 0.0565902 0.159291 0.800808
+-0.577041 0.0188904 0.106574 0.809511
+0.577041 0.0188904 -0.211325 -0.788675
+0.57457 0.0565902 -0.159291 -0.800808
+0.569639 0.0940477 -0.106574 -0.809511
+0.562268 0.131103 -0.0534014 -0.814748
+0.55249 0.167596 6.08539e-10 -0.816497
+0.540346 0.203372 0.0534014 -0.814748
+0.525887 0.238277 0.106574 -0.809511
+0.509177 0.272161 0.159291 -0.800808
+0.490287 0.30488 0.211325 -0.788675
+0.469297 0.336294 0.262454 -0.773165
+0.446298 0.366267 0.31246 -0.754344
+0.421387 0.394672 0.361127 -0.732294
+0.394672 0.421387 0.408248 -0.707107
+0.366267 0.446298 0.453621 -0.678892
+0.336294 0.469297 0.497052 -0.64777
+0.30488 0.490287 0.538354 -0.613875
+0.272161 0.509178 0.57735 -0.57735
+0.238276 0.525887 0.613875 -0.538354
+0.203372 0.540346 0.64777 -0.497052
+0.167596 0.55249 0.678892 -0.453621
+0.131103 0.562268 0.707107 -0.408248
+0.0940477 0.569639 0.732294 -0.361127
+0.0565902 0.57457 0.754345 -0.31246
+0.0188903 0.577041 0.773165 -0.262454
+-0.0188904 0.577041 0.788675 -0.211325
+-0.0565903 0.57457 0.800808 -0.159291
+-0.0940478 0.569639 0.809511 -0.106574
+-0.131103 0.562268 0.814748 -0.0534014
+-0.167596 0.55249 0.816497 1.33633e-07
+-0.203372 0.540346 0.814748 0.0534016
+-0.238277 0.525887 0.809511 0.106574
+-0.272161 0.509178 0.800808 0.15929
+-0.30488 0.490287 0.788675 0.211325
+-0.336294 0.469297 0.773165 0.262454
+-0.366267 0.446298 0.754344 0.31246
+-0.394672 0.421387 0.732294 0.361127
+-0.421387 0.394672 0.707107 0.408248
+-0.446298 0.366267 0.678892 0.453621
+-0.469297 0.336294 0.64777 0.497052
+-0.490287 0.30488 0.613875 0.538354
+-0.509177 0.272161 0.57735 0.57735
+-0.525887 0.238277 0.538354 0.613875
+-0.540346 0.203372 0.497052 0.64777
+-0.55249 0.167596 0.453621 0.678892
+-0.562268 0.131103 0.408248 0.707107
+-0.569639 0.0940478 0.361127 0.732294
+-0.57457 0.0565902 0.31246 0.754344
+-0.577041 0.0188904 0.262454 0.773165
+0.612045 0.0200363 -0.12878 -0.78001
+0.609424 0.060023 -0.0774894 -0.786763
+0.604193 0.0997527 -0.0258667 -0.790146
+0.596375 0.139055 0.0258667 -0.790146
+0.586004 0.177762 0.0774894 -0.786763
+0.573123 0.215708 0.12878 -0.78001
+0.557788 0.25273 0.17952 -0.769917
+0.540064 0.28867 0.22949 -0.756528
+0.520028 0.323374 0.278478 -0.739899
+0.497765 0.356693 0.326274 -0.720101
+0.47337 0.388485 0.372672 -0.69722
+0.446949 0.418613 0.417474 -0.671353
+0.418613 0.446949 0.460489 -0.642612
+0.388485 0.47337 0.501532 -0.611118
+0.356693 0.497765 0.540427 -0.577008
+0.323374 0.520028 0.577008 -0.540427
+0.28867 0.540064 0.611118 -0.501532
+0.25273 0.557788 0.642612 -0.460489
+0.215708 0.573123 0.671353 -0.417474
+0.177762 0.586004 0.69722 -0.372672
+0.139055 0.596375 0.720101 -0.326274
+0.0997526 0.604193 0.739899 -0.278478
+0.0600229 0.609424 0.756528 -0.22949
+0.0200362 0.612045 0.769917 -0.179519
+-0.0200363 0.612045 0.78001 -0.12878
+-0.060023 0.609424 0.786763 -0.0774893
+-0.0997527 0.604193 0.790146 -0.0258667
+-0.139055 0.596375 0.790146 0.0258668
+-0.177762 0.586004 0.786763 0.0774895
+-0.215708 0.573123 0.78001 0.12878
+-0.25273 0.557788 0.769917 0.17952
+-0.28867 0.540064 0.756528 0.22949
+-0.323374 0.520028 0.739899 0.278478
+-0.356693 0.497765 0.720101 0.326274
+-0.388485 0.47337 0.69722 0.372672
+-0.418613 0.446949 0.671353 0.417474
+-0.446949 0.418613 0.642612 0.460489
+-0.47337 0.388485 0.611118 0.501532
+-0.497765 0.356693 0.577008 0.540427
+-0.520028 0.323374 0.540427 0.577008
+-0.540064 0.28867 0.501532 0.611118
+-0.557788 0.25273 0.460489 0.642612
+-0.573123 0.215708 0.417474 0.671353
+-0.586004 0.177762 0.372672 0.69722
+-0.596375 0.139055 0.326274 0.720101
+-0.604193 0.0997527 0.278478 0.739899
+-0.609424 0.060023 0.22949 0.756528
+-0.612045 0.0200363 0.17952 0.769917
+0.612045 0.0200363 0.0258667 -0.790146
+0.609424 0.060023 0.0774894 -0.786763
+0.604193 0.0997527 0.12878 -0.78001
+0.596375 0.139055 0.17952 -0.769917
+0.586004 0.177762 0.22949 -0.756528
+0.573123 0.215708 0.278478 -0.739899
+0.557788 0.25273 0.326274 -0.720101
+0.540064 0.28867 0.372672 -0.69722
+0.520028 0.323374 0.417474 -0.671353
+0.497765 0.356693 0.460489 -0.642612
+0.47337 0.388485 0.501532 -0.611118
+0.446949 0.418613 0.540427 -0.577008
+0.418613 0.446949 0.577008 -0.540427
+0.388485 0.47337 0.611118 -0.501532
+0.356693 0.497765 0.642612 -0.460489
+0.323374 0.520028 0.671353 -0.417474
+0.28867 0.540064 0.69722 -0.372672
+0.25273 0.557788 0.720101 -0.326274
+0.215708 0.573123 0.739899 -0.278478
+0.177762 0.586004 0.756528 -0.22949
+0.139055 0.596375 0.769917 -0.17952
+0.0997526 0.604193 0.78001 -0.12878
+0.0600229 0.609424 0.786763 -0.0774893
+0.0200362 0.612045 0.790146 -0.0258666
+-0.0200363 0.612045 0.790146 0.0258668
+-0.060023 0.609424 0.786763 0.0774894
+-0.0997527 0.604193 0.78001 0.12878
+-0.139055 0.596375 0.769917 0.17952
+-0.177762 0.586004 0.756528 0.22949
+-0.215708 0.573123 0.739899 0.278478
+-0.25273 0.557788 0.720101 0.326274
+-0.28867 0.540064 0.69722 0.372672
+-0.323374 0.520028 0.671353 0.417474
+-0.356693 0.497765 0.642612 0.460489
+-0.388485 0.47337 0.611118 0.501532
+-0.418613 0.446949 0.577008 0.540427
+-0.446949 0.418613 0.540427 0.577008
+-0.47337 0.388485 0.501532 0.611118
+-0.497765 0.356693 0.460489 0.642612
+-0.520028 0.323374 0.417474 0.671353
+-0.540064 0.28867 0.372672 0.69722
+-0.557788 0.25273 0.326274 0.720101
+-0.573123 0.215708 0.278478 0.739899
+-0.586004 0.177762 0.22949 0.756528
+-0.596375 0.139055 0.179519 0.769917
+-0.604193 0.0997527 0.12878 0.78001
+-0.609424 0.060023 0.0774893 0.786763
+-0.612045 0.0200363 0.0258668 0.790146
+0.645152 0.0211201 0.099691 -0.757229
+0.642389 0.0632698 0.149003 -0.749087
+0.636876 0.105149 0.197676 -0.737738
+0.628635 0.146577 0.245503 -0.72323
+0.617702 0.187378 0.292279 -0.705625
+0.604125 0.227376 0.337804 -0.684998
+0.58796 0.266401 0.381881 -0.661438
+0.569278 0.304285 0.424324 -0.635045
+0.548158 0.340866 0.464949 -0.605934
+0.52469 0.375988 0.503584 -0.574227
+0.498976 0.409499 0.540062 -0.540062
+0.471125 0.441257 0.574227 -0.503584
+0.441257 0.471125 0.605934 -0.464949
+0.409499 0.498976 0.635045 -0.424324
+0.375988 0.52469 0.661438 -0.381881
+0.340866 0.548158 0.684998 -0.337804
+0.304285 0.569278 0.705625 -0.292279
+0.266401 0.58796 0.72323 -0.245503
+0.227376 0.604125 0.737738 -0.197676
+0.187378 0.617702 0.749087 -0.149003
+0.146577 0.628635 0.757229 -0.099691
+0.105148 0.636876 0.762127 -0.0499524
+0.0632697 0.642389 0.763763 9.38938e-08
+0.02112 0.645152 0.762127 0.0499525
+-0.0211201 0.645152 0.757229 0.0996911
+-0.0632698 0.642389 0.749087 0.149003
+-0.105149 0.636876 0.737738 0.197676
+-0.146577 0.628635 0.72323 0.245503
+-0.187378 0.617702 0.705625 0.292279
+-0.227377 0.604125 0.684998 0.337804
+-0.266401 0.58796 0.661438 0.381881
+-0.304285 0.569278 0.635045 0.424324
+-0.340866 0.548158 0.605934 0.464949
+-0.375988 0.52469 0.574227 0.503584
+-0.409499 0.498976 0.540062 0.540062
+-0.441257 0.471125 0.503584 0.574227
+-0.471125 0.441257 0.464949 0.605934
+-0.498976 0.409499 0.424324 0.635045
+-0.52469 0.375988 0.381881 0.661438
+-0.548158 0.340866 0.337804 0.684998
+-0.569278 0.304285 0.292279 0.705625
+-0.58796 0.266401 0.245503 0.72323
+-0.604125 0.227376 0.197676 0.737738
+-0.617702 0.187378 0.149003 0.749087
+-0.628635 0.146577 0.099691 0.757229
+-0.636876 0.105149 0.0499525 0.762127
+-0.642389 0.0632698 -3.62313e-08 0.763763
+-0.645152 0.0211201 -0.0499524 0.762127
+0.645152 0.0211201 -0.0499525 -0.762127
+0.642389 0.0632698 2.08657e-09 -0.763763
+0.636876 0.105149 0.0499525 -0.762127
+0.628635 0.146577 0.099691 -0.757229
+0.617702 0.187378 0.149003 -0.749087
+0.604125 0.227376 0.197676 -0.737738
+0.58796 0.266401 0.245503 -0.72323
+0.569278 0.304285 0.292279 -0.705625
+0.548158 0.340866 0.337804 -0.684998
+0.52469 0.375988 0.381881 -0.661438
+0.498976 0.409499 0.424324 -0.635045
+0.471125 0.441257 0.464949 -0.605934
+0.441257 0.471125 0.503584 -0.574227
+0.409499 0.498976 0.540062 -0.540062
+0.375988 0.52469 0.574227 -0.503584
+0.340866 0.548158 0.605934 -0.464949
+0.304285 0.569278 0.635045 -0.424324
+0.266401 0.58796 0.661438 -0.381881
+0.227376 0.604125 0.684998 -0.337804
+0.187378 0.617702 0.705625 -0.292279
+0.146577 0.628635 0.72323 -0.245503
+0.105148 0.636876 0.737738 -0.197676
+0.0632697 0.642389 0.749087 -0.149003
+0.02112 0.645152 0.757229 -0.0996909
+-0.0211201 0.645152 0.762127 -0.0499524
+-0.0632698 0.642389 0.763763 6.39241e-08
+-0.105149 0.636876 0.762127 0.0499525
+-0.146577 0.628635 0.757229 0.0996911
+-0.187378 0.617702 0.749087 0.149003
+-0.227377 0.604125 0.737738 0.197676
+-0.266401 0.58796 0.72323 0.245504
+-0.304285 0.569278 0.705625 0.292279
+-0.340866 0.548158 0.684998 0.337804
+-0.375988 0.52469 0.661438 0.381881
+-0.409499 0.498976 0.635045 0.424324
+-0.441257 0.471125 0.605934 0.464949
+-0.471125 0.441257 0.574227 0.503584
+-0.498976 0.409499 0.540062 0.540062
+-0.52469 0.375988 0.503584 0.574227
+-0.548158 0.340866 0.464949 0.605934
+-0.569278 0.304285 0.424324 0.635045
+-0.58796 0.266401 0.381881 0.661438
+-0.604125 0.227376 0.337803 0.684998
+-0.617702 0.187378 0.292279 0.705625
+-0.628635 0.146577 0.245503 0.72323
+-0.636876 0.105149 0.197676 0.737738
+-0.642389 0.0632698 0.149003 0.749087
+-0.645152 0.0211201 0.0996911 0.757229
+0.676641 0.0221509 0.0240806 -0.735586
+0.673743 0.0663579 0.0721387 -0.732436
+0.667961 0.110281 0.119888 -0.72615
+0.659318 0.153731 0.167124 -0.716754
+0.647852 0.196524 0.213644 -0.704289
+0.633611 0.238474 0.259249 -0.688808
+0.616658 0.279404 0.303744 -0.670378
+0.597064 0.319137 0.346939 -0.649076
+0.574913 0.357504 0.388647 -0.624996
+0.5503 0.394339 0.428692 -0.598239
+0.523331 0.429486 0.466901 -0.56892
+0.49412 0.462794 0.50311 -0.537165
+0.462794 0.49412 0.537165 -0.50311
+0.429486 0.523331 0.56892 -0.466901
+0.394339 0.5503 0.598239 -0.428692
+0.357504 0.574913 0.624996 -0.388647
+0.319137 0.597064 0.649077 -0.346939
+0.279404 0.616658 0.670378 -0.303744
+0.238474 0.633611 0.688808 -0.259249
+0.196524 0.647852 0.704289 -0.213644
+0.153731 0.659318 0.716754 -0.167124
+0.110281 0.667961 0.72615 -0.119888
+0.0663578 0.673743 0.732436 -0.0721386
+0.0221508 0.676641 0.735586 -0.0240805
+-0.022151 0.676641 0.735586 0.0240807
+-0.066358 0.673743 0.732436 0.0721387
+-0.110281 0.667961 0.72615 0.119888
+-0.153731 0.659318 0.716754 0.167124
+-0.196524 0.647852 0.704289 0.213644
+-0.238475 0.633611 0.688808 0.259249
+-0.279404 0.616658 0.670378 0.303744
+-0.319137 0.597064 0.649077 0.346939
+-0.357504 0.574913 0.624996 0.388647
+-0.394339 0.5503 0.598239 0.428692
+-0.429486 0.523331 0.56892 0.466901
+-0.462794 0.49412 0.537165 0.50311
+-0.49412 0.462794 0.50311 0.537165
+-0.523331 0.429486 0.466901 0.56892
+-0.5503 0.394339 0.428692 0.598239
+-0.574913 0.357504 0.388647 0.624996
+-0.597063 0.319137 0.346939 0.649076
+-0.616658 0.279404 0.303744 0.670378
+-0.633611 0.238474 0.259249 0.688808
+-0.647852 0.196524 0.213644 0.704289
+-0.659318 0.153731 0.167124 0.716754
+-0.667961 0.110281 0.119888 0.72615
+-0.673743 0.0663579 0.0721386 0.732436
+-0.676641 0.022151 0.0240807 0.735586
+0.612045 0.0200363 0.326274 -0.720101
+0.609424 0.060023 0.372672 -0.69722
+0.604193 0.0997527 0.417474 -0.671353
+0.596375 0.139055 0.460489 -0.642612
+0.586004 0.177762 0.501532 -0.611118
+0.573123 0.215708 0.540427 -0.577008
+0.557788 0.25273 0.577008 -0.540427
+0.540064 0.28867 0.611118 -0.501532
+0.520028 0.323374 0.642612 -0.460489
+0.497765 0.356693 0.671353 -0.417474
+0.47337 0.388485 0.69722 -0.372672
+0.446949 0.418613 0.720101 -0.326274
+0.418613 0.446949 0.739899 -0.278478
+0.388485 0.47337 0.756528 -0.22949
+0.356693 0.497765 0.769917 -0.17952
+0.323374 0.520028 0.78001 -0.12878
+0.28867 0.540064 0.786763 -0.0774893
+0.25273 0.557788 0.790146 -0.0258667
+0.215708 0.573123 0.790146 0.0258668
+0.177762 0.586004 0.786763 0.0774894
+0.139055 0.596375 0.78001 0.12878
+0.0997526 0.604193 0.769917 0.17952
+0.0600229 0.609424 0.756528 0.22949
+0.0200362 0.612045 0.739899 0.278478
+-0.0200363 0.612045 0.720101 0.326274
+-0.060023 0.609424 0.69722 0.372672
+-0.0997527 0.604193 0.671353 0.417474
+-0.139055 0.596375 0.642612 0.460489
+-0.177762 0.586004 0.611118 0.501532
+-0.215708 0.573123 0.577008 0.540427
+-0.25273 0.557788 0.540427 0.577008
+-0.28867 0.540064 0.501532 0.611118
+-0.323374 0.520028 0.460489 0.642612
+-0.356693 0.497765 0.417474 0.671353
+-0.388485 0.47337 0.372672 0.69722
+-0.418613 0.446949 0.326274 0.720101
+-0.446949 0.418613 0.278478 0.739899
+-0.47337 0.388485 0.22949 0.756528
+-0.497765 0.356693 0.17952 0.769917
+-0.520028 0.323374 0.12878 0.78001
+-0.540064 0.28867 0.0774894 0.786763
+-0.557788 0.25273 0.0258667 0.790146
+-0.573123 0.215708 -0.0258668 0.790146
+-0.586004 0.177762 -0.0774893 0.786763
+-0.596375 0.139055 -0.12878 0.78001
+-0.604193 0.0997527 -0.17952 0.769917
+-0.609424 0.060023 -0.22949 0.756528
+-0.612045 0.0200363 -0.278478 0.739899
+0.645152 0.0211201 0.381881 -0.661438
+0.642389 0.0632698 0.424324 -0.635045
+0.636876 0.105149 0.464949 -0.605934
+0.628635 0.146577 0.503584 -0.574227
+0.617702 0.187378 0.540062 -0.540062
+0.604125 0.227376 0.574227 -0.503584
+0.58796 0.266401 0.605934 -0.464949
+0.569278 0.304285 0.635045 -0.424324
+0.548158 0.340866 0.661438 -0.381881
+0.52469 0.375988 0.684998 -0.337804
+0.498976 0.409499 0.705625 -0.292279
+0.471125 0.441257 0.72323 -0.245503
+0.441257 0.471125 0.737738 -0.197676
+0.409499 0.498976 0.749087 -0.149003
+0.375988 0.52469 0.757229 -0.099691
+0.340866 0.548158 0.762127 -0.0499524
+0.304285 0.569278 0.763763 6.27856e-08
+0.266401 0.58796 0.762127 0.0499525
+0.227376 0.604125 0.757229 0.0996911
+0.187378 0.617702 0.749087 0.149003
+0.146577 0.628635 0.737738 0.197676
+0.105148 0.636876 0.72323 0.245504
+0.0632697 0.642389 0.705625 0.292279
+0.02112 0.645152 0.684998 0.337804
+-0.0211201 0.645152 0.661438 0.381881
+-0.0632698 0.642389 0.635045 0.424324
+-0.105149 0.636876 0.605934 0.464949
+-0.146577 0.628635 0.574227 0.503584
+-0.187378 0.617702 0.540062 0.540062
+-0.227377 0.604125 0.503584 0.574227
+-0.266401 0.58796 0.464949 0.605934
+-0.304285 0.569278 0.424324 0.635045
+-0.340866 0.548158 0.381881 0.661438
+-0.375988 0.52469 0.337803 0.684998
+-0.409499 0.498976 0.292279 0.705625
+-0.441257 0.471125 0.245503 0.72323
+-0.471125 0.441257 0.197676 0.737738
+-0.498976 0.409499 0.149003 0.749087
+-0.52469 0.375988 0.0996911 0.757229
+-0.548158 0.340866 0.0499524 0.762127
+-0.569278 0.304285 8.59245e-08 0.763763
+-0.58796 0.266401 -0.0499525 0.762127
+-0.604125 0.227376 -0.0996911 0.757229
+-0.617702 0.187378 -0.149003 0.749087
+-0.628635 0.146577 -0.197676 0.737738
+-0.636876 0.105149 -0.245503 0.72323
+-0.642389 0.0632698 -0.292279 0.705625
+-0.645152 0.0211201 -0.337804 0.684998
+0.645152 0.0211201 0.245503 -0.72323
+0.642389 0.0632698 0.292279 -0.705625
+0.636876 0.105149 0.337804 -0.684998
+0.628635 0.146577 0.381881 -0.661438
+0.617702 0.187378 0.424324 -0.635045
+0.604125 0.227376 0.464949 -0.605934
+0.58796 0.266401 0.503584 -0.574227
+0.569278 0.304285 0.540062 -0.540062
+0.548158 0.340866 0.574227 -0.503584
+0.52469 0.375988 0.605934 -0.464949
+0.498976 0.409499 0.635045 -0.424324
+0.471125 0.441257 0.661438 -0.381881
+0.441257 0.471125 0.684998 -0.337804
+0.409499 0.498976 0.705625 -0.292279
+0.375988 0.52469 0.72323 -0.245503
+0.340866 0.548158 0.737738 -0.197676
+0.304285 0.569278 0.749087 -0.149003
+0.266401 0.58796 0.757229 -0.099691
+0.227376 0.604125 0.762127 -0.0499524
+0.187378 0.617702 0.763763 3.28159e-08
+0.146577 0.628635 0.762127 0.0499525
+0.105148 0.636876 0.757229 0.0996911
+0.0632697 0.642389 0.749087 0.149003
+0.02112 0.645152 0.737738 0.197676
+-0.0211201 0.645152 0.72323 0.245504
+-0.0632698 0.642389 0.705625 0.292279
+-0.105149 0.636876 0.684998 0.337804
+-0.146577 0.628635 0.661438 0.381881
+-0.187378 0.617702 0.635045 0.424324
+-0.227377 0.604125 0.605934 0.464949
+-0.266401 0.58796 0.574227 0.503584
+-0.304285 0.569278 0.540062 0.540062
+-0.340866 0.548158 0.503584 0.574227
+-0.375988 0.52469 0.464949 0.605934
+-0.409499 0.498976 0.424324 0.635045
+-0.441257 0.471125 0.381881 0.661438
+-0.471125 0.441257 0.337804 0.684998
+-0.498976 0.409499 0.292279 0.705625
+-0.52469 0.375988 0.245504 0.72323
+-0.548158 0.340866 0.197676 0.737738
+-0.569278 0.304285 0.149003 0.749087
+-0.58796 0.266401 0.099691 0.757229
+-0.604125 0.227376 0.0499524 0.762127
+-0.617702 0.187378 2.48466e-08 0.763763
+-0.628635 0.146577 -0.0499525 0.762127
+-0.636876 0.105149 -0.099691 0.757229
+-0.642389 0.0632698 -0.149003 0.749087
+-0.645152 0.0211201 -0.197676 0.737738
+0.676641 0.0221509 0.303744 -0.670378
+0.673743 0.0663579 0.346939 -0.649076
+0.667961 0.110281 0.388647 -0.624996
+0.659318 0.153731 0.428692 -0.598239
+0.647852 0.196524 0.466901 -0.56892
+0.633611 0.238474 0.50311 -0.537165
+0.616658 0.279404 0.537165 -0.50311
+0.597064 0.319137 0.56892 -0.466901
+0.574913 0.357504 0.598239 -0.428692
+0.5503 0.394339 0.624996 -0.388647
+0.523331 0.429486 0.649076 -0.346939
+0.49412 0.462794 0.670378 -0.303744
+0.462794 0.49412 0.688808 -0.259249
+0.429486 0.523331 0.704289 -0.213644
+0.394339 0.5503 0.716754 -0.167124
+0.357504 0.574913 0.72615 -0.119888
+0.319137 0.597064 0.732436 -0.0721386
+0.279404 0.616658 0.735586 -0.0240805
+0.238474 0.633611 0.735586 0.0240806
+0.196524 0.647852 0.732436 0.0721387
+0.153731 0.659318 0.72615 0.119888
+0.110281 0.667961 0.716754 0.167124
+0.0663578 0.673743 0.704289 0.213644
+0.0221508 0.676641 0.688808 0.259249
+-0.022151 0.676641 0.670378 0.303744
+-0.066358 0.673743 0.649076 0.346939
+-0.110281 0.667961 0.624996 0.388647
+-0.153731 0.659318 0.598239 0.428692
+-0.196524 0.647852 0.56892 0.466901
+-0.238475 0.633611 0.537165 0.50311
+-0.279404 0.616658 0.50311 0.537165
+-0.319137 0.597064 0.466901 0.56892
+-0.357504 0.574913 0.428692 0.598239
+-0.394339 0.5503 0.388647 0.624996
+-0.429486 0.523331 0.346939 0.649076
+-0.462794 0.49412 0.303744 0.670378
+-0.49412 0.462794 0.259249 0.688808
+-0.523331 0.429486 0.213644 0.704289
+-0.5503 0.394339 0.167124 0.716754
+-0.574913 0.357504 0.119888 0.72615
+-0.597063 0.319137 0.0721387 0.732436
+-0.616658 0.279404 0.0240806 0.735586
+-0.633611 0.238474 -0.0240807 0.735586
+-0.647852 0.196524 -0.0721386 0.732436
+-0.659318 0.153731 -0.119888 0.72615
+-0.667961 0.110281 -0.167124 0.716754
+-0.673743 0.0663579 -0.213644 0.704289
+-0.676641 0.022151 -0.259249 0.688808
+0.676641 0.0221509 0.428692 -0.598239
+0.673743 0.0663579 0.466901 -0.56892
+0.667961 0.110281 0.50311 -0.537165
+0.659318 0.153731 0.537165 -0.50311
+0.647852 0.196524 0.56892 -0.466901
+0.633611 0.238474 0.598239 -0.428692
+0.616658 0.279404 0.624996 -0.388647
+0.597064 0.319137 0.649076 -0.346939
+0.574913 0.357504 0.670378 -0.303744
+0.5503 0.394339 0.688808 -0.259249
+0.523331 0.429486 0.704289 -0.213644
+0.49412 0.462794 0.716754 -0.167124
+0.462794 0.49412 0.72615 -0.119888
+0.429486 0.523331 0.732436 -0.0721387
+0.394339 0.5503 0.735586 -0.0240806
+0.357504 0.574913 0.735586 0.0240807
+0.319137 0.597064 0.732436 0.0721387
+0.279404 0.616658 0.72615 0.119888
+0.238474 0.633611 0.716754 0.167124
+0.196524 0.647852 0.704289 0.213644
+0.153731 0.659318 0.688808 0.259249
+0.110281 0.667961 0.670378 0.303744
+0.0663578 0.673743 0.649076 0.346939
+0.0221508 0.676641 0.624996 0.388647
+-0.022151 0.676641 0.598239 0.428692
+-0.066358 0.673743 0.56892 0.466901
+-0.110281 0.667961 0.537165 0.50311
+-0.153731 0.659318 0.50311 0.537165
+-0.196524 0.647852 0.466901 0.56892
+-0.238475 0.633611 0.428692 0.598239
+-0.279404 0.616658 0.388647 0.624996
+-0.319137 0.597064 0.346939 0.649076
+-0.357504 0.574913 0.303744 0.670378
+-0.394339 0.5503 0.259249 0.688808
+-0.429486 0.523331 0.213644 0.704289
+-0.462794 0.49412 0.167124 0.716754
+-0.49412 0.462794 0.119888 0.72615
+-0.523331 0.429486 0.0721386 0.732436
+-0.5503 0.394339 0.0240807 0.735586
+-0.574913 0.357504 -0.0240806 0.735586
+-0.597063 0.319137 -0.0721386 0.732436
+-0.616658 0.279404 -0.119888 0.72615
+-0.633611 0.238474 -0.167124 0.716754
+-0.647852 0.196524 -0.213644 0.704289
+-0.659318 0.153731 -0.259249 0.688808
+-0.667961 0.110281 -0.303744 0.670378
+-0.673743 0.0663579 -0.346939 0.649076
+-0.676641 0.022151 -0.388647 0.624996
+0.706728 0.0231359 0.466228 -0.531631
+0.703702 0.0693086 0.5 -0.5
+0.697662 0.115184 0.531631 -0.466228
+0.688635 0.160567 0.560986 -0.430459
+0.676659 0.205262 0.587938 -0.392847
+0.661785 0.249078 0.612372 -0.353553
+0.644078 0.291828 0.634185 -0.312745
+0.623612 0.333328 0.653281 -0.270598
+0.600477 0.3734 0.669581 -0.227292
+0.574769 0.411874 0.683013 -0.183013
+0.546601 0.448584 0.69352 -0.13795
+0.516092 0.483373 0.701057 -0.0922959
+0.483373 0.516092 0.705593 -0.046247
+0.448584 0.546601 0.707107 1.58103e-09
+0.411874 0.574769 0.705593 0.046247
+0.3734 0.600477 0.701057 0.092296
+0.333328 0.623613 0.69352 0.13795
+0.291828 0.644078 0.683013 0.183013
+0.249078 0.661785 0.669581 0.227292
+0.205262 0.676659 0.653281 0.270598
+0.160567 0.688635 0.634185 0.312745
+0.115184 0.697662 0.612372 0.353553
+0.0693085 0.703702 0.587938 0.392848
+0.0231358 0.706728 0.560985 0.430459
+-0.023136 0.706728 0.531631 0.466228
+-0.0693086 0.703702 0.5 0.5
+-0.115185 0.697662 0.466228 0.531631
+-0.160567 0.688635 0.430459 0.560986
+-0.205262 0.676659 0.392847 0.587938
+-0.249078 0.661785 0.353553 0.612372
+-0.291828 0.644078 0.312745 0.634185
+-0.333328 0.623613 0.270598 0.653281
+-0.3734 0.600477 0.227292 0.669581
+-0.411874 0.574769 0.183013 0.683013
+-0.448584 0.546601 0.13795 0.69352
+-0.483373 0.516092 0.0922959 0.701057
+-0.516092 0.483373 0.046247 0.705593
+-0.546601 0.448584 -3.24897e-08 0.707107
+-0.574769 0.411874 -0.0462469 0.705593
+-0.600477 0.3734 -0.092296 0.701057
+-0.623612 0.333328 -0.13795 0.69352
+-0.644078 0.291828 -0.183013 0.683013
+-0.661785 0.249078 -0.227292 0.669581
+-0.676659 0.205262 -0.270598 0.653281
+-0.688635 0.160567 -0.312745 0.634185
+-0.697662 0.115185 -0.353553 0.612372
+-0.703702 0.0693086 -0.392848 0.587938
+-0.706728 0.0231359 -0.430459 0.560986
+0.706728 0.0231359 0.353553 -0.612372
+0.703702 0.0693086 0.392847 -0.587938
+0.697662 0.115184 0.430459 -0.560986
+0.688635 0.160567 0.466228 -0.531631
+0.676659 0.205262 0.5 -0.5
+0.661785 0.249078 0.531631 -0.466228
+0.644078 0.291828 0.560986 -0.430459
+0.623612 0.333328 0.587938 -0.392847
+0.600477 0.3734 0.612372 -0.353553
+0.574769 0.411874 0.634185 -0.312745
+0.546601 0.448584 0.653281 -0.270598
+0.516092 0.483373 0.669581 -0.227292
+0.483373 0.516092 0.683013 -0.183013
+0.448584 0.546601 0.69352 -0.13795
+0.411874 0.574769 0.701057 -0.0922959
+0.3734 0.600477 0.705593 -0.0462469
+0.333328 0.623613 0.707107 5.81282e-08
+0.291828 0.644078 0.705593 0.046247
+0.249078 0.661785 0.701057 0.092296
+0.205262 0.676659 0.69352 0.13795
+0.160567 0.688635 0.683013 0.183013
+0.115184 0.697662 0.669581 0.227292
+0.0693085 0.703702 0.653281 0.270598
+0.0231358 0.706728 0.634185 0.312745
+-0.023136 0.706728 0.612372 0.353553
+-0.0693086 0.703702 0.587938 0.392848
+-0.115185 0.697662 0.560985 0.430459
+-0.160567 0.688635 0.531631 0.466228
+-0.205262 0.676659 0.5 0.5
+-0.249078 0.661785 0.466228 0.531631
+-0.291828 0.644078 0.430459 0.560986
+-0.333328 0.623613 0.392848 0.587938
+-0.3734 0.600477 0.353553 0.612372
+-0.411874 0.574769 0.312745 0.634185
+-0.448584 0.546601 0.270598 0.653281
+-0.483373 0.516092 0.227292 0.669581
+-0.516092 0.483373 0.183013 0.683013
+-0.546601 0.448584 0.13795 0.69352
+-0.574769 0.411874 0.092296 0.701057
+-0.600477 0.3734 0.046247 0.705593
+-0.623612 0.333328 7.95506e-08 0.707107
+-0.644078 0.291828 -0.046247 0.705593
+-0.661785 0.249078 -0.092296 0.701057
+-0.676659 0.205262 -0.13795 0.69352
+-0.688635 0.160567 -0.183013 0.683013
+-0.697662 0.115185 -0.227292 0.669581
+-0.703702 0.0693086 -0.270598 0.653281
+-0.706728 0.0231359 -0.312745 0.634185
+0.735586 0.0240806 0.394339 -0.5503
+0.732436 0.0721387 0.429486 -0.523331
+0.72615 0.119888 0.462794 -0.49412
+0.716754 0.167124 0.49412 -0.462794
+0.704289 0.213644 0.523331 -0.429486
+0.688808 0.259249 0.5503 -0.394339
+0.670378 0.303744 0.574913 -0.357504
+0.649076 0.346939 0.597064 -0.319137
+0.624996 0.388647 0.616658 -0.279404
+0.598239 0.428692 0.633611 -0.238474
+0.56892 0.466901 0.647852 -0.196524
+0.537165 0.50311 0.659318 -0.153731
+0.50311 0.537165 0.667961 -0.110281
+0.466901 0.56892 0.673743 -0.0663579
+0.428692 0.598239 0.676641 -0.0221509
+0.388647 0.624996 0.676641 0.022151
+0.346939 0.649077 0.673743 0.066358
+0.303744 0.670378 0.667961 0.110281
+0.259249 0.688808 0.659318 0.153731
+0.213644 0.704289 0.647852 0.196524
+0.167124 0.716754 0.633611 0.238474
+0.119888 0.72615 0.616658 0.279404
+0.0721386 0.732436 0.597063 0.319137
+0.0240805 0.735586 0.574913 0.357504
+-0.0240807 0.735586 0.5503 0.394339
+-0.0721387 0.732436 0.523331 0.429486
+-0.119888 0.72615 0.49412 0.462794
+-0.167124 0.716754 0.462794 0.49412
+-0.213644 0.704289 0.429486 0.523331
+-0.259249 0.688808 0.394339 0.5503
+-0.303744 0.670378 0.357504 0.574913
+-0.346939 0.649077 0.319137 0.597063
+-0.388647 0.624996 0.279404 0.616658
+-0.428692 0.598239 0.238474 0.633611
+-0.466901 0.56892 0.196524 0.647852
+-0.50311 0.537165 0.153731 0.659318
+-0.537165 0.50311 0.110281 0.667961
+-0.56892 0.466901 0.0663579 0.673743
+-0.598239 0.428692 0.022151 0.676641
+-0.624996 0.388647 -0.0221509 0.676641
+-0.649076 0.346939 -0.0663578 0.673743
+-0.670378 0.303744 -0.110281 0.667961
+-0.688808 0.259249 -0.153731 0.659318
+-0.704289 0.213644 -0.196524 0.647852
+-0.716754 0.167124 -0.238474 0.633611
+-0.72615 0.119888 -0.279404 0.616658
+-0.732436 0.0721386 -0.319137 0.597064
+-0.735586 0.0240807 -0.357504 0.574913
+0.676641 0.0221509 0.167124 -0.716754
+0.673743 0.0663579 0.213644 -0.704289
+0.667961 0.110281 0.259249 -0.688808
+0.659318 0.153731 0.303744 -0.670378
+0.647852 0.196524 0.346939 -0.649076
+0.633611 0.238474 0.388647 -0.624996
+0.616658 0.279404 0.428692 -0.598239
+0.597064 0.319137 0.466901 -0.56892
+0.574913 0.357504 0.50311 -0.537165
+0.5503 0.394339 0.537165 -0.50311
+0.523331 0.429486 0.56892 -0.466901
+0.49412 0.462794 0.598239 -0.428692
+0.462794 0.49412 0.624996 -0.388647
+0.429486 0.523331 0.649076 -0.346939
+0.394339 0.5503 0.670378 -0.303744
+0.357504 0.574913 0.688808 -0.259249
+0.319137 0.597064 0.704289 -0.213644
+0.279404 0.616658 0.716754 -0.167124
+0.238474 0.633611 0.72615 -0.119888
+0.196524 0.647852 0.732436 -0.0721386
+0.153731 0.659318 0.735586 -0.0240806
+0.110281 0.667961 0.735586 0.0240807
+0.0663578 0.673743 0.732436 0.0721388
+0.0221508 0.676641 0.72615 0.119888
+-0.022151 0.676641 0.716754 0.167124
+-0.066358 0.673743 0.704289 0.213644
+-0.110281 0.667961 0.688808 0.259249
+-0.153731 0.659318 0.670378 0.303744
+-0.196524 0.647852 0.649076 0.346939
+-0.238475 0.633611 0.624996 0.388648
+-0.279404 0.616658 0.598239 0.428692
+-0.319137 0.597064 0.56892 0.466901
+-0.357504 0.574913 0.537165 0.50311
+-0.394339 0.5503 0.50311 0.537165
+-0.429486 0.523331 0.466901 0.56892
+-0.462794 0.49412 0.428692 0.598239
+-0.49412 0.462794 0.388647 0.624996
+-0.523331 0.429486 0.346939 0.649076
+-0.5503 0.394339 0.303744 0.670378
+-0.574913 0.357504 0.259249 0.688808
+-0.597063 0.319137 0.213644 0.704289
+-0.616658 0.279404 0.167124 0.716754
+-0.633611 0.238474 0.119888 0.72615
+-0.647852 0.196524 0.0721387 0.732436
+-0.659318 0.153731 0.0240805 0.735586
+-0.667961 0.110281 -0.0240805 0.735586
+-0.673743 0.0663579 -0.0721387 0.732436
+-0.676641 0.022151 -0.119888 0.72615
+0.706728 0.0231359 0.227292 -0.669581
+0.703702 0.0693086 0.270598 -0.653281
+0.697662 0.115184 0.312745 -0.634185
+0.688635 0.160567 0.353553 -0.612372
+0.676659 0.205262 0.392847 -0.587938
+0.661785 0.249078 0.430459 -0.560986
+0.644078 0.291828 0.466228 -0.531631
+0.623612 0.333328 0.5 -0.5
+0.600477 0.3734 0.531631 -0.466228
+0.574769 0.411874 0.560986 -0.430459
+0.546601 0.448584 0.587938 -0.392847
+0.516092 0.483373 0.612372 -0.353553
+0.483373 0.516092 0.634185 -0.312745
+0.448584 0.546601 0.653281 -0.270598
+0.411874 0.574769 0.669581 -0.227292
+0.3734 0.600477 0.683013 -0.183013
+0.333328 0.623613 0.69352 -0.13795
+0.291828 0.644078 0.701057 -0.0922959
+0.249078 0.661785 0.705593 -0.046247
+0.205262 0.676659 0.707107 3.03816e-08
+0.160567 0.688635 0.705593 0.046247
+0.115184 0.697662 0.701057 0.0922961
+0.0693085 0.703702 0.69352 0.13795
+0.0231358 0.706728 0.683013 0.183013
+-0.023136 0.706728 0.669581 0.227292
+-0.0693086 0.703702 0.653281 0.270598
+-0.115185 0.697662 0.634185 0.312745
+-0.160567 0.688635 0.612372 0.353553
+-0.205262 0.676659 0.587938 0.392848
+-0.249078 0.661785 0.560985 0.430459
+-0.291828 0.644078 0.531631 0.466228
+-0.333328 0.623613 0.5 0.5
+-0.3734 0.600477 0.466228 0.531631
+-0.411874 0.574769 0.430459 0.560986
+-0.448584 0.546601 0.392847 0.587938
+-0.483373 0.516092 0.353553 0.612372
+-0.516092 0.483373 0.312745 0.634185
+-0.546601 0.448584 0.270598 0.653281
+-0.574769 0.411874 0.227292 0.669581
+-0.600477 0.3734 0.183013 0.683013
+-0.623612 0.333328 0.13795 0.69352
+-0.644078 0.291828 0.092296 0.701057
+-0.661785 0.249078 0.0462469 0.705593
+-0.676659 0.205262 2.30035e-08 0.707107
+-0.688635 0.160567 -0.046247 0.705593
+-0.697662 0.115185 -0.0922959 0.701057
+-0.703702 0.0693086 -0.13795 0.69352
+-0.706728 0.0231359 -0.183013 0.683013
+0.706728 0.0231359 0.092296 -0.701057
+0.703702 0.0693086 0.13795 -0.69352
+0.697662 0.115184 0.183013 -0.683013
+0.688635 0.160567 0.227292 -0.669581
+0.676659 0.205262 0.270598 -0.653281
+0.661785 0.249078 0.312745 -0.634185
+0.644078 0.291828 0.353553 -0.612372
+0.623612 0.333328 0.392847 -0.587938
+0.600477 0.3734 0.430459 -0.560986
+0.574769 0.411874 0.466228 -0.531631
+0.546601 0.448584 0.5 -0.5
+0.516092 0.483373 0.531631 -0.466228
+0.483373 0.516092 0.560986 -0.430459
+0.448584 0.546601 0.587938 -0.392847
+0.411874 0.574769 0.612372 -0.353553
+0.3734 0.600477 0.634185 -0.312745
+0.333328 0.623613 0.653282 -0.270598
+0.291828 0.644078 0.669581 -0.227292
+0.249078 0.661785 0.683013 -0.183013
+0.205262 0.676659 0.69352 -0.13795
+0.160567 0.688635 0.701057 -0.0922959
+0.115184 0.697662 0.705593 -0.0462469
+0.0693085 0.703702 0.707107 8.69287e-08
+0.0231358 0.706728 0.705593 0.0462471
+-0.023136 0.706728 0.701057 0.092296
+-0.0693086 0.703702 0.69352 0.13795
+-0.115185 0.697662 0.683013 0.183013
+-0.160567 0.688635 0.669581 0.227292
+-0.205262 0.676659 0.653281 0.270598
+-0.249078 0.661785 0.634185 0.312745
+-0.291828 0.644078 0.612372 0.353553
+-0.333328 0.623613 0.587938 0.392847
+-0.3734 0.600477 0.560986 0.430459
+-0.411874 0.574769 0.531631 0.466228
+-0.448584 0.546601 0.5 0.5
+-0.483373 0.516092 0.466228 0.531631
+-0.516092 0.483373 0.430459 0.560986
+-0.546601 0.448584 0.392847 0.587938
+-0.574769 0.411874 0.353553 0.612372
+-0.600477 0.3734 0.312745 0.634185
+-0.623612 0.333328 0.270598 0.653281
+-0.644078 0.291828 0.227292 0.669581
+-0.661785 0.249078 0.183013 0.683013
+-0.676659 0.205262 0.13795 0.69352
+-0.688635 0.160567 0.0922959 0.701057
+-0.697662 0.115185 0.046247 0.705593
+-0.703702 0.0693086 -3.35437e-08 0.707107
+-0.706728 0.0231359 -0.0462469 0.705593
+0.735586 0.0240806 0.153731 -0.659318
+0.732436 0.0721387 0.196524 -0.647852
+0.72615 0.119888 0.238474 -0.633611
+0.716754 0.167124 0.279404 -0.616658
+0.704289 0.213644 0.319137 -0.597064
+0.688808 0.259249 0.357504 -0.574913
+0.670378 0.303744 0.394339 -0.5503
+0.649076 0.346939 0.429486 -0.523331
+0.624996 0.388647 0.462794 -0.49412
+0.598239 0.428692 0.49412 -0.462794
+0.56892 0.466901 0.523331 -0.429486
+0.537165 0.50311 0.5503 -0.394339
+0.50311 0.537165 0.574913 -0.357504
+0.466901 0.56892 0.597064 -0.319137
+0.428692 0.598239 0.616658 -0.279404
+0.388647 0.624996 0.633611 -0.238474
+0.346939 0.649077 0.647852 -0.196524
+0.303744 0.670378 0.659318 -0.153731
+0.259249 0.688808 0.667961 -0.110281
+0.213644 0.704289 0.673743 -0.0663579
+0.167124 0.716754 0.676641 -0.0221509
+0.119888 0.72615 0.676641 0.022151
+0.0721386 0.732436 0.673743 0.066358
+0.0240805 0.735586 0.667961 0.110281
+-0.0240807 0.735586 0.659318 0.153731
+-0.0721387 0.732436 0.647852 0.196524
+-0.119888 0.72615 0.633611 0.238474
+-0.167124 0.716754 0.616658 0.279404
+-0.213644 0.704289 0.597063 0.319137
+-0.259249 0.688808 0.574913 0.357504
+-0.303744 0.670378 0.5503 0.394339
+-0.346939 0.649077 0.523331 0.429486
+-0.388647 0.624996 0.49412 0.462794
+-0.428692 0.598239 0.462794 0.49412
+-0.466901 0.56892 0.429486 0.523331
+-0.50311 0.537165 0.394339 0.5503
+-0.537165 0.50311 0.357504 0.574913
+-0.56892 0.466901 0.319137 0.597064
+-0.598239 0.428692 0.279404 0.616658
+-0.624996 0.388647 0.238474 0.633611
+-0.649076 0.346939 0.196524 0.647852
+-0.670378 0.303744 0.153731 0.659318
+-0.688808 0.259249 0.110281 0.667961
+-0.704289 0.213644 0.0663579 0.673743
+-0.716754 0.167124 0.0221509 0.676641
+-0.72615 0.119888 -0.0221509 0.676641
+-0.732436 0.0721386 -0.0663579 0.673743
+-0.735586 0.0240807 -0.110281 0.667961
+0.735586 0.0240806 0.279404 -0.616658
+0.732436 0.0721387 0.319137 -0.597064
+0.72615 0.119888 0.357504 -0.574913
+0.716754 0.167124 0.394339 -0.5503
+0.704289 0.213644 0.429486 -0.523331
+0.688808 0.259249 0.462794 -0.49412
+0.670378 0.303744 0.49412 -0.462794
+0.649076 0.346939 0.523331 -0.429486
+0.624996 0.388647 0.5503 -0.394339
+0.598239 0.428692 0.574913 -0.357504
+0.56892 0.466901 0.597064 -0.319137
+0.537165 0.50311 0.616658 -0.279404
+0.50311 0.537165 0.633611 -0.238474
+0.466901 0.56892 0.647852 -0.196524
+0.428692 0.598239 0.659318 -0.153731
+0.388647 0.624996 0.667961 -0.110281
+0.346939 0.649077 0.673743 -0.0663579
+0.303744 0.670378 0.676641 -0.0221509
+0.259249 0.688808 0.676641 0.022151
+0.213644 0.704289 0.673743 0.0663579
+0.167124 0.716754 0.667961 0.110281
+0.119888 0.72615 0.659318 0.153731
+0.0721386 0.732436 0.647852 0.196524
+0.0240805 0.735586 0.633611 0.238474
+-0.0240807 0.735586 0.616658 0.279404
+-0.0721387 0.732436 0.597063 0.319137
+-0.119888 0.72615 0.574913 0.357504
+-0.167124 0.716754 0.5503 0.394339
+-0.213644 0.704289 0.52333 0.429486
+-0.259249 0.688808 0.49412 0.462794
+-0.303744 0.670378 0.462794 0.49412
+-0.346939 0.649077 0.429486 0.523331
+-0.388647 0.624996 0.394339 0.5503
+-0.428692 0.598239 0.357504 0.574913
+-0.466901 0.56892 0.319137 0.597064
+-0.50311 0.537165 0.279404 0.616658
+-0.537165 0.50311 0.238474 0.633611
+-0.56892 0.466901 0.196524 0.647852
+-0.598239 0.428692 0.153731 0.659318
+-0.624996 0.388647 0.110281 0.667961
+-0.649076 0.346939 0.066358 0.673743
+-0.670378 0.303744 0.0221509 0.676641
+-0.688808 0.259249 -0.022151 0.676641
+-0.704289 0.213644 -0.0663579 0.673743
+-0.716754 0.167124 -0.110281 0.667961
+-0.72615 0.119888 -0.153731 0.659318
+-0.732436 0.0721386 -0.196524 0.647852
+-0.735586 0.0240807 -0.238474 0.633611
+0.763354 0.0249896 0.322749 -0.559017
+0.760085 0.0748618 0.358619 -0.536711
+0.753561 0.124413 0.392954 -0.512107
+0.743811 0.173432 0.425606 -0.485311
+0.730875 0.221709 0.456435 -0.456435
+0.71481 0.269035 0.485311 -0.425606
+0.695684 0.31521 0.512107 -0.392954
+0.673578 0.360035 0.536711 -0.358619
+0.648589 0.403318 0.559017 -0.322749
+0.620822 0.444875 0.578929 -0.285496
+0.590396 0.484526 0.596362 -0.247021
+0.557443 0.522102 0.611241 -0.207488
+0.522102 0.557443 0.623502 -0.167067
+0.484526 0.590396 0.633094 -0.12593
+0.444875 0.620822 0.639975 -0.0842543
+0.403318 0.648589 0.644115 -0.0422175
+0.360035 0.673579 0.645497 5.30635e-08
+0.31521 0.695684 0.644115 0.0422176
+0.269035 0.71481 0.639975 0.0842543
+0.221709 0.730875 0.633094 0.12593
+0.173432 0.743811 0.623502 0.167067
+0.124413 0.753561 0.611241 0.207488
+0.0748617 0.760085 0.596362 0.247021
+0.0249895 0.763354 0.578929 0.285496
+-0.0249897 0.763354 0.559017 0.322749
+-0.0748619 0.760085 0.536711 0.358619
+-0.124414 0.753561 0.512107 0.392954
+-0.173432 0.743811 0.48531 0.425606
+-0.221709 0.730875 0.456435 0.456436
+-0.269036 0.71481 0.425606 0.485311
+-0.31521 0.695684 0.392954 0.512107
+-0.360035 0.673579 0.358619 0.536711
+-0.403318 0.648589 0.322749 0.559017
+-0.444875 0.620822 0.285496 0.578929
+-0.484526 0.590397 0.247021 0.596362
+-0.522102 0.557443 0.207488 0.611241
+-0.557443 0.522102 0.167067 0.623502
+-0.590397 0.484526 0.12593 0.633094
+-0.620822 0.444875 0.0842544 0.639975
+-0.648589 0.403318 0.0422175 0.644115
+-0.673578 0.360035 7.26194e-08 0.645497
+-0.695684 0.31521 -0.0422175 0.644115
+-0.71481 0.269035 -0.0842544 0.639975
+-0.730875 0.221709 -0.12593 0.633094
+-0.743811 0.173432 -0.167067 0.623502
+-0.753561 0.124414 -0.207488 0.611241
+-0.760085 0.0748618 -0.247021 0.596362
+-0.763354 0.0249897 -0.285496 0.578929
+0.763354 0.0249896 0.207488 -0.611241
+0.760085 0.0748618 0.247021 -0.596362
+0.753561 0.124413 0.285496 -0.578929
+0.743811 0.173432 0.322749 -0.559017
+0.730875 0.221709 0.358619 -0.536711
+0.71481 0.269035 0.392954 -0.512107
+0.695684 0.31521 0.425606 -0.485311
+0.673578 0.360035 0.456435 -0.456435
+0.648589 0.403318 0.485311 -0.425606
+0.620822 0.444875 0.512107 -0.392954
+0.590396 0.484526 0.536711 -0.358619
+0.557443 0.522102 0.559017 -0.322749
+0.522102 0.557443 0.578929 -0.285496
+0.484526 0.590396 0.596362 -0.247021
+0.444875 0.620822 0.611241 -0.207488
+0.403318 0.648589 0.623502 -0.167067
+0.360035 0.673579 0.633094 -0.12593
+0.31521 0.695684 0.639975 -0.0842543
+0.269035 0.71481 0.644115 -0.0422175
+0.221709 0.730875 0.645497 2.77345e-08
+0.173432 0.743811 0.644115 0.0422176
+0.124413 0.753561 0.639975 0.0842544
+0.0748617 0.760085 0.633094 0.12593
+0.0249895 0.763354 0.623502 0.167067
+-0.0249897 0.763354 0.611241 0.207488
+-0.0748619 0.760085 0.596362 0.247021
+-0.124414 0.753561 0.578929 0.285496
+-0.173432 0.743811 0.559017 0.322749
+-0.221709 0.730875 0.536711 0.358619
+-0.269036 0.71481 0.512107 0.392954
+-0.31521 0.695684 0.48531 0.425606
+-0.360035 0.673579 0.456436 0.456435
+-0.403318 0.648589 0.425606 0.485311
+-0.444875 0.620822 0.392954 0.512107
+-0.484526 0.590397 0.358619 0.536711
+-0.522102 0.557443 0.322749 0.559017
+-0.557443 0.522102 0.285496 0.578929
+-0.590397 0.484526 0.247021 0.596362
+-0.620822 0.444875 0.207488 0.611241
+-0.648589 0.403318 0.167067 0.623502
+-0.673578 0.360035 0.12593 0.633094
+-0.695684 0.31521 0.0842543 0.639975
+-0.71481 0.269035 0.0422175 0.644115
+-0.730875 0.221709 2.09992e-08 0.645497
+-0.743811 0.173432 -0.0422176 0.644115
+-0.753561 0.124414 -0.0842543 0.639975
+-0.760085 0.0748618 -0.12593 0.633094
+-0.763354 0.0249897 -0.167067 0.623502
+0.790146 0.0258667 0.25273 -0.557788
+0.786763 0.0774894 0.28867 -0.540064
+0.78001 0.12878 0.323374 -0.520028
+0.769917 0.17952 0.356693 -0.497765
+0.756528 0.22949 0.388485 -0.47337
+0.739899 0.278478 0.418613 -0.446949
+0.720101 0.326274 0.446949 -0.418613
+0.69722 0.372672 0.47337 -0.388485
+0.671353 0.417474 0.497765 -0.356693
+0.642612 0.460489 0.520028 -0.323374
+0.611118 0.501532 0.540064 -0.28867
+0.577008 0.540427 0.557788 -0.25273
+0.540427 0.577008 0.573123 -0.215708
+0.501532 0.611118 0.586004 -0.177762
+0.460489 0.642612 0.596375 -0.139055
+0.417474 0.671353 0.604193 -0.0997526
+0.372672 0.69722 0.609424 -0.0600229
+0.326274 0.720101 0.612045 -0.0200362
+0.278478 0.739899 0.612045 0.0200363
+0.22949 0.756528 0.609424 0.060023
+0.17952 0.769917 0.604193 0.0997527
+0.12878 0.78001 0.596375 0.139055
+0.0774893 0.786763 0.586004 0.177762
+0.0258666 0.790146 0.573123 0.215708
+-0.0258668 0.790146 0.557788 0.25273
+-0.0774894 0.786763 0.540064 0.28867
+-0.12878 0.78001 0.520028 0.323374
+-0.17952 0.769917 0.497765 0.356693
+-0.22949 0.756528 0.47337 0.388485
+-0.278478 0.739899 0.446949 0.418613
+-0.326274 0.720101 0.418613 0.446949
+-0.372672 0.69722 0.388485 0.47337
+-0.417474 0.671353 0.356693 0.497765
+-0.460489 0.642612 0.323374 0.520028
+-0.501532 0.611118 0.28867 0.540064
+-0.540427 0.577008 0.25273 0.557788
+-0.577008 0.540427 0.215708 0.573123
+-0.611118 0.501532 0.177762 0.586004
+-0.642612 0.460489 0.139055 0.596375
+-0.671353 0.417474 0.0997527 0.604193
+-0.69722 0.372672 0.0600231 0.609424
+-0.720101 0.326274 0.0200363 0.612045
+-0.739899 0.278478 -0.0200363 0.612045
+-0.756528 0.22949 -0.060023 0.609424
+-0.769917 0.179519 -0.0997527 0.604193
+-0.78001 0.12878 -0.139055 0.596375
+-0.786763 0.0774893 -0.177762 0.586004
+-0.790146 0.0258668 -0.215708 0.573123
+0.612045 0.0200363 -0.278478 -0.739899
+0.609424 0.060023 -0.22949 -0.756528
+0.604193 0.0997527 -0.17952 -0.769917
+0.596375 0.139055 -0.12878 -0.78001
+0.586004 0.177762 -0.0774894 -0.786763
+0.573123 0.215708 -0.0258667 -0.790146
+0.557788 0.25273 0.0258667 -0.790146
+0.540064 0.28867 0.0774894 -0.786763
+0.520028 0.323374 0.12878 -0.78001
+0.497765 0.356693 0.17952 -0.769917
+0.47337 0.388485 0.22949 -0.756528
+0.446949 0.418613 0.278478 -0.739899
+0.418613 0.446949 0.326274 -0.720101
+0.388485 0.47337 0.372672 -0.69722
+0.356693 0.497765 0.417474 -0.671353
+0.323374 0.520028 0.460489 -0.642612
+0.28867 0.540064 0.501532 -0.611118
+0.25273 0.557788 0.540427 -0.577008
+0.215708 0.573123 0.577008 -0.540427
+0.177762 0.586004 0.611118 -0.501532
+0.139055 0.596375 0.642612 -0.460489
+0.0997526 0.604193 0.671353 -0.417474
+0.0600229 0.609424 0.69722 -0.372672
+0.0200362 0.612045 0.720101 -0.326273
+-0.0200363 0.612045 0.739899 -0.278478
+-0.060023 0.609424 0.756528 -0.22949
+-0.0997527 0.604193 0.769917 -0.179519
+-0.139055 0.596375 0.78001 -0.12878
+-0.177762 0.586004 0.786763 -0.0774892
+-0.215708 0.573123 0.790146 -0.0258666
+-0.25273 0.557788 0.790146 0.0258668
+-0.28867 0.540064 0.786763 0.0774893
+-0.323374 0.520028 0.78001 0.12878
+-0.356693 0.497765 0.769917 0.17952
+-0.388485 0.47337 0.756528 0.22949
+-0.418613 0.446949 0.739899 0.278478
+-0.446949 0.418613 0.720101 0.326274
+-0.47337 0.388485 0.69722 0.372672
+-0.497765 0.356693 0.671353 0.417474
+-0.520028 0.323374 0.642612 0.460489
+-0.540064 0.28867 0.611118 0.501532
+-0.557788 0.25273 0.577008 0.540427
+-0.573123 0.215708 0.540427 0.577008
+-0.586004 0.177762 0.501532 0.611118
+-0.596375 0.139055 0.460489 0.642612
+-0.604193 0.0997527 0.417474 0.671353
+-0.609424 0.060023 0.372672 0.69722
+-0.612045 0.0200363 0.326274 0.720101
+0.645152 0.0211201 -0.197676 -0.737738
+0.642389 0.0632698 -0.149003 -0.749087
+0.636876 0.105149 -0.099691 -0.757229
+0.628635 0.146577 -0.0499525 -0.762127
+0.617702 0.187378 5.69236e-10 -0.763763
+0.604125 0.227376 0.0499525 -0.762127
+0.58796 0.266401 0.099691 -0.757229
+0.569278 0.304285 0.149003 -0.749087
+0.548158 0.340866 0.197676 -0.737738
+0.52469 0.375988 0.245503 -0.72323
+0.498976 0.409499 0.292279 -0.705625
+0.471125 0.441257 0.337804 -0.684998
+0.441257 0.471125 0.381881 -0.661438
+0.409499 0.498976 0.424324 -0.635045
+0.375988 0.52469 0.464949 -0.605934
+0.340866 0.548158 0.503584 -0.574227
+0.304285 0.569278 0.540062 -0.540062
+0.266401 0.58796 0.574227 -0.503584
+0.227376 0.604125 0.605934 -0.464949
+0.187378 0.617702 0.635045 -0.424324
+0.146577 0.628635 0.661438 -0.381881
+0.105148 0.636876 0.684998 -0.337803
+0.0632697 0.642389 0.705625 -0.292279
+0.02112 0.645152 0.72323 -0.245503
+-0.0211201 0.645152 0.737738 -0.197676
+-0.0632698 0.642389 0.749087 -0.149003
+-0.105149 0.636876 0.757229 -0.099691
+-0.146577 0.628635 0.762127 -0.0499524
+-0.187378 0.617702 0.763763 1.25002e-07
+-0.227377 0.604125 0.762127 0.0499526
+-0.266401 0.58796 0.757229 0.0996911
+-0.304285 0.569278 0.749087 0.149003
+-0.340866 0.548158 0.737738 0.197676
+-0.375988 0.52469 0.72323 0.245504
+-0.409499 0.498976 0.705625 0.292279
+-0.441257 0.471125 0.684998 0.337804
+-0.471125 0.441257 0.661438 0.381881
+-0.498976 0.409499 0.635045 0.424324
+-0.52469 0.375988 0.605934 0.464949
+-0.548158 0.340866 0.574227 0.503584
+-0.569278 0.304285 0.540062 0.540062
+-0.58796 0.266401 0.503584 0.574227
+-0.604125 0.227376 0.464949 0.605934
+-0.617702 0.187378 0.424324 0.635045
+-0.628635 0.146577 0.381881 0.661438
+-0.636876 0.105149 0.337804 0.684998
+-0.642389 0.0632698 0.292279 0.705625
+-0.645152 0.0211201 0.245504 0.72323
+0.645152 0.0211201 -0.337804 -0.684998
+0.642389 0.0632698 -0.292279 -0.705625
+0.636876 0.105149 -0.245503 -0.72323
+0.628635 0.146577 -0.197676 -0.737738
+0.617702 0.187378 -0.149003 -0.749087
+0.604125 0.227376 -0.099691 -0.757229
+0.58796 0.266401 -0.0499525 -0.762127
+0.569278 0.304285 1.61233e-08 -0.763763
+0.548158 0.340866 0.0499525 -0.762127
+0.52469 0.375988 0.099691 -0.757229
+0.498976 0.409499 0.149003 -0.749087
+0.471125 0.441257 0.197676 -0.737738
+0.441257 0.471125 0.245503 -0.72323
+0.409499 0.498976 0.292279 -0.705625
+0.375988 0.52469 0.337804 -0.684998
+0.340866 0.548158 0.381881 -0.661438
+0.304285 0.569278 0.424324 -0.635045
+0.266401 0.58796 0.464949 -0.605934
+0.227376 0.604125 0.503584 -0.574227
+0.187378 0.617702 0.540062 -0.540062
+0.146577 0.628635 0.574227 -0.503584
+0.105148 0.636876 0.605934 -0.464949
+0.0632697 0.642389 0.635045 -0.424324
+0.02112 0.645152 0.661438 -0.381881
+-0.0211201 0.645152 0.684998 -0.337804
+-0.0632698 0.642389 0.705625 -0.292279
+-0.105149 0.636876 0.72323 -0.245503
+-0.146577 0.628635 0.737738 -0.197676
+-0.187378 0.617702 0.749087 -0.149003
+-0.227377 0.604125 0.757229 -0.0996909
+-0.266401 0.58796 0.762127 -0.0499524
+-0.304285 0.569278 0.763763 -8.70629e-08
+-0.340866 0.548158 0.762127 0.0499525
+-0.375988 0.52469 0.757229 0.0996911
+-0.409499 0.498976 0.749087 0.149003
+-0.441257 0.471125 0.737738 0.197676
+-0.471125 0.441257 0.72323 0.245503
+-0.498976 0.409499 0.705625 0.292279
+-0.52469 0.375988 0.684998 0.337804
+-0.548158 0.340866 0.661438 0.381881
+-0.569278 0.304285 0.635045 0.424324
+-0.58796 0.266401 0.605934 0.464949
+-0.604125 0.227376 0.574227 0.503584
+-0.617702 0.187378 0.540062 0.540062
+-0.628635 0.146577 0.503584 0.574227
+-0.636876 0.105149 0.464949 0.605934
+-0.642389 0.0632698 0.424324 0.635045
+-0.645152 0.0211201 0.381881 0.661438
+0.676641 0.0221509 -0.259249 -0.688808
+0.673743 0.0663579 -0.213644 -0.704289
+0.667961 0.110281 -0.167124 -0.716754
+0.659318 0.153731 -0.119888 -0.72615
+0.647852 0.196524 -0.0721387 -0.732436
+0.633611 0.238474 -0.0240806 -0.735586
+0.616658 0.279404 0.0240806 -0.735586
+0.597064 0.319137 0.0721387 -0.732436
+0.574913 0.357504 0.119888 -0.72615
+0.5503 0.394339 0.167124 -0.716754
+0.523331 0.429486 0.213644 -0.704289
+0.49412 0.462794 0.259249 -0.688808
+0.462794 0.49412 0.303744 -0.670378
+0.429486 0.523331 0.346939 -0.649076
+0.394339 0.5503 0.388647 -0.624996
+0.357504 0.574913 0.428692 -0.598239
+0.319137 0.597064 0.466901 -0.56892
+0.279404 0.616658 0.50311 -0.537165
+0.238474 0.633611 0.537165 -0.50311
+0.196524 0.647852 0.56892 -0.466901
+0.153731 0.659318 0.598239 -0.428692
+0.110281 0.667961 0.624996 -0.388647
+0.0663578 0.673743 0.649077 -0.346939
+0.0221508 0.676641 0.670378 -0.303744
+-0.022151 0.676641 0.688808 -0.259249
+-0.066358 0.673743 0.704289 -0.213644
+-0.110281 0.667961 0.716754 -0.167124
+-0.153731 0.659318 0.72615 -0.119888
+-0.196524 0.647852 0.732436 -0.0721385
+-0.238475 0.633611 0.735586 -0.0240805
+-0.279404 0.616658 0.735586 0.0240807
+-0.319137 0.597064 0.732436 0.0721386
+-0.357504 0.574913 0.72615 0.119888
+-0.394339 0.5503 0.716754 0.167124
+-0.429486 0.523331 0.704289 0.213644
+-0.462794 0.49412 0.688808 0.259249
+-0.49412 0.462794 0.670378 0.303744
+-0.523331 0.429486 0.649076 0.346939
+-0.5503 0.394339 0.624996 0.388647
+-0.574913 0.357504 0.598239 0.428692
+-0.597063 0.319137 0.56892 0.466901
+-0.616658 0.279404 0.537165 0.50311
+-0.633611 0.238474 0.50311 0.537165
+-0.647852 0.196524 0.466901 0.56892
+-0.659318 0.153731 0.428692 0.598239
+-0.667961 0.110281 0.388647 0.624996
+-0.673743 0.0663579 0.346939 0.649076
+-0.676641 0.022151 0.303744 0.670378
+0.676641 0.0221509 -0.119888 -0.72615
+0.673743 0.0663579 -0.0721387 -0.732436
+0.667961 0.110281 -0.0240806 -0.735586
+0.659318 0.153731 0.0240806 -0.735586
+0.647852 0.196524 0.0721387 -0.732436
+0.633611 0.238474 0.119888 -0.72615
+0.616658 0.279404 0.167124 -0.716754
+0.597064 0.319137 0.213644 -0.704289
+0.574913 0.357504 0.259249 -0.688808
+0.5503 0.394339 0.303744 -0.670378
+0.523331 0.429486 0.346939 -0.649076
+0.49412 0.462794 0.388647 -0.624996
+0.462794 0.49412 0.428692 -0.598239
+0.429486 0.523331 0.466901 -0.56892
+0.394339 0.5503 0.50311 -0.537165
+0.357504 0.574913 0.537165 -0.50311
+0.319137 0.597064 0.56892 -0.466901
+0.279404 0.616658 0.598239 -0.428692
+0.238474 0.633611 0.624996 -0.388647
+0.196524 0.647852 0.649076 -0.346939
+0.153731 0.659318 0.670378 -0.303744
+0.110281 0.667961 0.688808 -0.259249
+0.0663578 0.673743 0.704289 -0.213644
+0.0221508 0.676641 0.716754 -0.167124
+-0.022151 0.676641 0.72615 -0.119888
+-0.066358 0.673743 0.732436 -0.0721386
+-0.110281 0.667961 0.735586 -0.0240805
+-0.153731 0.659318 0.735586 0.0240806
+-0.196524 0.647852 0.732436 0.0721388
+-0.238475 0.633611 0.72615 0.119888
+-0.279404 0.616658 0.716754 0.167124
+-0.319137 0.597064 0.704289 0.213644
+-0.357504 0.574913 0.688808 0.259249
+-0.394339 0.5503 0.670378 0.303744
+-0.429486 0.523331 0.649076 0.346939
+-0.462794 0.49412 0.624996 0.388647
+-0.49412 0.462794 0.598239 0.428692
+-0.523331 0.429486 0.56892 0.466901
+-0.5503 0.394339 0.537165 0.50311
+-0.574913 0.357504 0.50311 0.537165
+-0.597063 0.319137 0.466901 0.56892
+-0.616658 0.279404 0.428692 0.598239
+-0.633611 0.238474 0.388647 0.624996
+-0.647852 0.196524 0.346939 0.649076
+-0.659318 0.153731 0.303744 0.670378
+-0.667961 0.110281 0.259249 0.688808
+-0.673743 0.0663579 0.213644 0.704289
+-0.676641 0.022151 0.167124 0.716754
+0.706728 0.0231359 -0.046247 -0.705593
+0.703702 0.0693086 1.93179e-09 -0.707107
+0.697662 0.115184 0.046247 -0.705593
+0.688635 0.160567 0.092296 -0.701057
+0.676659 0.205262 0.13795 -0.69352
+0.661785 0.249078 0.183013 -0.683013
+0.644078 0.291828 0.227292 -0.669581
+0.623612 0.333328 0.270598 -0.653281
+0.600477 0.3734 0.312745 -0.634185
+0.574769 0.411874 0.353553 -0.612372
+0.546601 0.448584 0.392847 -0.587938
+0.516092 0.483373 0.430459 -0.560986
+0.483373 0.516092 0.466228 -0.531631
+0.448584 0.546601 0.5 -0.5
+0.411874 0.574769 0.531631 -0.466228
+0.3734 0.600477 0.560986 -0.430459
+0.333328 0.623613 0.587938 -0.392847
+0.291828 0.644078 0.612372 -0.353553
+0.249078 0.661785 0.634185 -0.312745
+0.205262 0.676659 0.653281 -0.270598
+0.160567 0.688635 0.669581 -0.227292
+0.115184 0.697662 0.683013 -0.183013
+0.0693085 0.703702 0.69352 -0.13795
+0.0231358 0.706728 0.701057 -0.0922959
+-0.023136 0.706728 0.705593 -0.0462469
+-0.0693086 0.703702 0.707107 5.91822e-08
+-0.115185 0.697662 0.705593 0.046247
+-0.160567 0.688635 0.701057 0.092296
+-0.205262 0.676659 0.69352 0.13795
+-0.249078 0.661785 0.683013 0.183013
+-0.291828 0.644078 0.669581 0.227292
+-0.333328 0.623613 0.653282 0.270598
+-0.3734 0.600477 0.634185 0.312745
+-0.411874 0.574769 0.612372 0.353553
+-0.448584 0.546601 0.587938 0.392847
+-0.483373 0.516092 0.560985 0.430459
+-0.516092 0.483373 0.531631 0.466228
+-0.546601 0.448584 0.5 0.5
+-0.574769 0.411874 0.466228 0.531631
+-0.600477 0.3734 0.430459 0.560986
+-0.623612 0.333328 0.392848 0.587938
+-0.644078 0.291828 0.353553 0.612372
+-0.661785 0.249078 0.312745 0.634185
+-0.676659 0.205262 0.270598 0.653281
+-0.688635 0.160567 0.227292 0.669581
+-0.697662 0.115185 0.183013 0.683013
+-0.703702 0.0693086 0.13795 0.69352
+-0.706728 0.0231359 0.092296 0.701057
+0.706728 0.0231359 -0.183013 -0.683013
+0.703702 0.0693086 -0.13795 -0.69352
+0.697662 0.115184 -0.092296 -0.701057
+0.688635 0.160567 -0.046247 -0.705593
+0.676659 0.205262 5.2701e-10 -0.707107
+0.661785 0.249078 0.046247 -0.705593
+0.644078 0.291828 0.092296 -0.701057
+0.623612 0.333328 0.13795 -0.69352
+0.600477 0.3734 0.183013 -0.683013
+0.574769 0.411874 0.227292 -0.669581
+0.546601 0.448584 0.270598 -0.653281
+0.516092 0.483373 0.312745 -0.634185
+0.483373 0.516092 0.353553 -0.612372
+0.448584 0.546601 0.392847 -0.587938
+0.411874 0.574769 0.430459 -0.560986
+0.3734 0.600477 0.466228 -0.531631
+0.333328 0.623613 0.5 -0.5
+0.291828 0.644078 0.531631 -0.466228
+0.249078 0.661785 0.560986 -0.430459
+0.205262 0.676659 0.587938 -0.392847
+0.160567 0.688635 0.612372 -0.353553
+0.115184 0.697662 0.634185 -0.312745
+0.0693085 0.703702 0.653282 -0.270598
+0.0231358 0.706728 0.669581 -0.227292
+-0.023136 0.706728 0.683013 -0.183013
+-0.0693086 0.703702 0.69352 -0.13795
+-0.115185 0.697662 0.701057 -0.0922959
+-0.160567 0.688635 0.705593 -0.046247
+-0.205262 0.676659 0.707107 1.15729e-07
+-0.249078 0.661785 0.705593 0.0462471
+-0.291828 0.644078 0.701057 0.0922961
+-0.333328 0.623613 0.69352 0.13795
+-0.3734 0.600477 0.683013 0.183013
+-0.411874 0.574769 0.669581 0.227292
+-0.448584 0.546601 0.653281 0.270598
+-0.483373 0.516092 0.634185 0.312745
+-0.516092 0.483373 0.612372 0.353553
+-0.546601 0.448584 0.587938 0.392848
+-0.574769 0.411874 0.560986 0.430459
+-0.600477 0.3734 0.531631 0.466228
+-0.623612 0.333328 0.5 0.5
+-0.644078 0.291828 0.466228 0.531631
+-0.661785 0.249078 0.430459 0.560986
+-0.676659 0.205262 0.392847 0.587938
+-0.688635 0.160567 0.353553 0.612372
+-0.697662 0.115185 0.312745 0.634185
+-0.703702 0.0693086 0.270598 0.653281
+-0.706728 0.0231359 0.227292 0.669581
+0.735586 0.0240806 -0.110281 -0.667961
+0.732436 0.0721387 -0.0663579 -0.673743
+0.72615 0.119888 -0.0221509 -0.676641
+0.716754 0.167124 0.0221509 -0.676641
+0.704289 0.213644 0.0663579 -0.673743
+0.688808 0.259249 0.110281 -0.667961
+0.670378 0.303744 0.153731 -0.659318
+0.649076 0.346939 0.196524 -0.647852
+0.624996 0.388647 0.238474 -0.633611
+0.598239 0.428692 0.279404 -0.616658
+0.56892 0.466901 0.319137 -0.597064
+0.537165 0.50311 0.357504 -0.574913
+0.50311 0.537165 0.394339 -0.5503
+0.466901 0.56892 0.429486 -0.523331
+0.428692 0.598239 0.462794 -0.49412
+0.388647 0.624996 0.49412 -0.462794
+0.346939 0.649077 0.523331 -0.429486
+0.303744 0.670378 0.5503 -0.394339
+0.259249 0.688808 0.574913 -0.357504
+0.213644 0.704289 0.597064 -0.319137
+0.167124 0.716754 0.616658 -0.279404
+0.119888 0.72615 0.633611 -0.238474
+0.0721386 0.732436 0.647852 -0.196524
+0.0240805 0.735586 0.659318 -0.153731
+-0.0240807 0.735586 0.667961 -0.110281
+-0.0721387 0.732436 0.673743 -0.0663579
+-0.119888 0.72615 0.676641 -0.0221509
+-0.167124 0.716754 0.676641 0.022151
+-0.213644 0.704289 0.673743 0.066358
+-0.259249 0.688808 0.667961 0.110281
+-0.303744 0.670378 0.659318 0.153731
+-0.346939 0.649077 0.647852 0.196524
+-0.388647 0.624996 0.633611 0.238474
+-0.428692 0.598239 0.616658 0.279404
+-0.466901 0.56892 0.597064 0.319137
+-0.50311 0.537165 0.574913 0.357504
+-0.537165 0.50311 0.5503 0.394339
+-0.56892 0.466901 0.523331 0.429486
+-0.598239 0.428692 0.49412 0.462794
+-0.624996 0.388647 0.462794 0.49412
+-0.649076 0.346939 0.429486 0.523331
+-0.670378 0.303744 0.394339 0.5503
+-0.688808 0.259249 0.357504 0.574913
+-0.704289 0.213644 0.319137 0.597064
+-0.716754 0.167124 0.279404 0.616658
+-0.72615 0.119888 0.238474 0.633611
+-0.732436 0.0721386 0.196524 0.647852
+-0.735586 0.0240807 0.153731 0.659318
+0.676641 0.0221509 -0.388647 -0.624996
+0.673743 0.0663579 -0.346939 -0.649076
+0.667961 0.110281 -0.303744 -0.670378
+0.659318 0.153731 -0.259249 -0.688808
+0.647852 0.196524 -0.213644 -0.704289
+0.633611 0.238474 -0.167124 -0.716754
+0.616658 0.279404 -0.119888 -0.72615
+0.597064 0.319137 -0.0721386 -0.732436
+0.574913 0.357504 -0.0240806 -0.735586
+0.5503 0.394339 0.0240806 -0.735586
+0.523331 0.429486 0.0721386 -0.732436
+0.49412 0.462794 0.119888 -0.72615
+0.462794 0.49412 0.167124 -0.716754
+0.429486 0.523331 0.213644 -0.704289
+0.394339 0.5503 0.259249 -0.688808
+0.357504 0.574913 0.303744 -0.670378
+0.319137 0.597064 0.346939 -0.649076
+0.279404 0.616658 0.388647 -0.624996
+0.238474 0.633611 0.428692 -0.598239
+0.196524 0.647852 0.466901 -0.56892
+0.153731 0.659318 0.50311 -0.537165
+0.110281 0.667961 0.537165 -0.50311
+0.0663578 0.673743 0.56892 -0.466901
+0.0221508 0.676641 0.598239 -0.428692
+-0.022151 0.676641 0.624996 -0.388647
+-0.066358 0.673743 0.649077 -0.346939
+-0.110281 0.667961 0.670378 -0.303744
+-0.153731 0.659318 0.688808 -0.259249
+-0.196524 0.647852 0.704289 -0.213644
+-0.238475 0.633611 0.716754 -0.167123
+-0.279404 0.616658 0.72615 -0.119888
+-0.319137 0.597064 0.732436 -0.0721387
+-0.357504 0.574913 0.735586 -0.0240806
+-0.394339 0.5503 0.735586 0.0240807
+-0.429486 0.523331 0.732436 0.0721386
+-0.462794 0.49412 0.72615 0.119888
+-0.49412 0.462794 0.716754 0.167124
+-0.523331 0.429486 0.704289 0.213644
+-0.5503 0.394339 0.688808 0.259249
+-0.574913 0.357504 0.670378 0.303744
+-0.597063 0.319137 0.649077 0.346939
+-0.616658 0.279404 0.624996 0.388647
+-0.633611 0.238474 0.598239 0.428692
+-0.647852 0.196524 0.56892 0.466901
+-0.659318 0.153731 0.537165 0.50311
+-0.667961 0.110281 0.50311 0.537165
+-0.673743 0.0663579 0.466901 0.56892
+-0.676641 0.022151 0.428692 0.598239
+0.706728 0.0231359 -0.312745 -0.634185
+0.703702 0.0693086 -0.270598 -0.653281
+0.697662 0.115184 -0.227292 -0.669581
+0.688635 0.160567 -0.183013 -0.683013
+0.676659 0.205262 -0.13795 -0.69352
+0.661785 0.249078 -0.092296 -0.701057
+0.644078 0.291828 -0.046247 -0.705593
+0.623612 0.333328 1.49273e-08 -0.707107
+0.600477 0.3734 0.046247 -0.705593
+0.574769 0.411874 0.092296 -0.701057
+0.546601 0.448584 0.13795 -0.69352
+0.516092 0.483373 0.183013 -0.683013
+0.483373 0.516092 0.227292 -0.669581
+0.448584 0.546601 0.270598 -0.653281
+0.411874 0.574769 0.312745 -0.634185
+0.3734 0.600477 0.353553 -0.612372
+0.333328 0.623613 0.392848 -0.587938
+0.291828 0.644078 0.430459 -0.560985
+0.249078 0.661785 0.466228 -0.531631
+0.205262 0.676659 0.5 -0.5
+0.160567 0.688635 0.531631 -0.466228
+0.115184 0.697662 0.560986 -0.430459
+0.0693085 0.703702 0.587938 -0.392847
+0.0231358 0.706728 0.612372 -0.353553
+-0.023136 0.706728 0.634185 -0.312745
+-0.0693086 0.703702 0.653282 -0.270598
+-0.115185 0.697662 0.669581 -0.227292
+-0.160567 0.688635 0.683013 -0.183013
+-0.205262 0.676659 0.69352 -0.13795
+-0.249078 0.661785 0.701057 -0.0922959
+-0.291828 0.644078 0.705593 -0.0462469
+-0.333328 0.623613 0.707107 -8.06046e-08
+-0.3734 0.600477 0.705593 0.046247
+-0.411874 0.574769 0.701057 0.092296
+-0.448584 0.546601 0.69352 0.13795
+-0.483373 0.516092 0.683013 0.183013
+-0.516092 0.483373 0.669581 0.227292
+-0.546601 0.448584 0.653281 0.270598
+-0.574769 0.411874 0.634185 0.312745
+-0.600477 0.3734 0.612372 0.353553
+-0.623612 0.333328 0.587938 0.392847
+-0.644078 0.291828 0.560986 0.430459
+-0.661785 0.249078 0.531631 0.466228
+-0.676659 0.205262 0.5 0.5
+-0.688635 0.160567 0.466228 0.531631
+-0.697662 0.115185 0.430459 0.560986
+-0.703702 0.0693086 0.392847 0.587938
+-0.706728 0.0231359 0.353553 0.612372
+0.706728 0.0231359 -0.430459 -0.560986
+0.703702 0.0693086 -0.392847 -0.587938
+0.697662 0.115184 -0.353553 -0.612372
+0.688635 0.160567 -0.312745 -0.634185
+0.676659 0.205262 -0.270598 -0.653281
+0.661785 0.249078 -0.227292 -0.669581
+0.644078 0.291828 -0.183013 -0.683013
+0.623612 0.333328 -0.13795 -0.69352
+0.600477 0.3734 -0.092296 -0.701057
+0.574769 0.411874 -0.046247 -0.705593
+0.546601 0.448584 -1.28193e-08 -0.707107
+0.516092 0.483373 0.046247 -0.705593
+0.483373 0.516092 0.092296 -0.701057
+0.448584 0.546601 0.13795 -0.69352
+0.411874 0.574769 0.183013 -0.683013
+0.3734 0.600477 0.227292 -0.669581
+0.333328 0.623613 0.270598 -0.653281
+0.291828 0.644078 0.312745 -0.634185
+0.249078 0.661785 0.353553 -0.612372
+0.205262 0.676659 0.392848 -0.587938
+0.160567 0.688635 0.430459 -0.560986
+0.115184 0.697662 0.466228 -0.531631
+0.0693085 0.703702 0.5 -0.5
+0.0231358 0.706728 0.531631 -0.466228
+-0.023136 0.706728 0.560986 -0.430459
+-0.0693086 0.703702 0.587938 -0.392847
+-0.115185 0.697662 0.612372 -0.353553
+-0.160567 0.688635 0.634185 -0.312745
+-0.205262 0.676659 0.653282 -0.270598
+-0.249078 0.661785 0.669581 -0.227292
+-0.291828 0.644078 0.683013 -0.183013
+-0.333328 0.623613 0.69352 -0.13795
+-0.3734 0.600477 0.701057 -0.092296
+-0.411874 0.574769 0.705593 -0.0462469
+-0.448584 0.546601 0.707107 -2.40575e-08
+-0.483373 0.516092 0.705593 0.046247
+-0.516092 0.483373 0.701057 0.0922959
+-0.546601 0.448584 0.69352 0.13795
+-0.574769 0.411874 0.683013 0.183013
+-0.600477 0.3734 0.669581 0.227292
+-0.623612 0.333328 0.653282 0.270598
+-0.644078 0.291828 0.634185 0.312745
+-0.661785 0.249078 0.612372 0.353553
+-0.676659 0.205262 0.587938 0.392847
+-0.688635 0.160567 0.560985 0.430459
+-0.697662 0.115185 0.531631 0.466228
+-0.703702 0.0693086 0.5 0.5
+-0.706728 0.0231359 0.466228 0.531631
+0.735586 0.0240806 -0.357504 -0.574913
+0.732436 0.0721387 -0.319137 -0.597064
+0.72615 0.119888 -0.279404 -0.616658
+0.716754 0.167124 -0.238474 -0.633611
+0.704289 0.213644 -0.196524 -0.647852
+0.688808 0.259249 -0.153731 -0.659318
+0.670378 0.303744 -0.110281 -0.667961
+0.649076 0.346939 -0.0663579 -0.673743
+0.624996 0.388647 -0.0221509 -0.676641
+0.598239 0.428692 0.0221509 -0.676641
+0.56892 0.466901 0.0663579 -0.673743
+0.537165 0.50311 0.110281 -0.667961
+0.50311 0.537165 0.153731 -0.659318
+0.466901 0.56892 0.196524 -0.647852
+0.428692 0.598239 0.238474 -0.633611
+0.388647 0.624996 0.279404 -0.616658
+0.346939 0.649077 0.319137 -0.597063
+0.303744 0.670378 0.357504 -0.574913
+0.259249 0.688808 0.394339 -0.5503
+0.213644 0.704289 0.429486 -0.523331
+0.167124 0.716754 0.462794 -0.49412
+0.119888 0.72615 0.49412 -0.462794
+0.0721386 0.732436 0.523331 -0.429486
+0.0240805 0.735586 0.5503 -0.394339
+-0.0240807 0.735586 0.574913 -0.357504
+-0.0721387 0.732436 0.597064 -0.319137
+-0.119888 0.72615 0.616658 -0.279404
+-0.167124 0.716754 0.633611 -0.238474
+-0.213644 0.704289 0.647852 -0.196524
+-0.259249 0.688808 0.659318 -0.153731
+-0.303744 0.670378 0.667961 -0.110281
+-0.346939 0.649077 0.673743 -0.066358
+-0.388647 0.624996 0.676641 -0.0221509
+-0.428692 0.598239 0.676641 0.022151
+-0.466901 0.56892 0.673743 0.0663579
+-0.50311 0.537165 0.667961 0.110281
+-0.537165 0.50311 0.659318 0.153731
+-0.56892 0.466901 0.647852 0.196524
+-0.598239 0.428692 0.633611 0.238474
+-0.624996 0.388647 0.616658 0.279404
+-0.649076 0.346939 0.597064 0.319137
+-0.670378 0.303744 0.574913 0.357504
+-0.688808 0.259249 0.5503 0.394339
+-0.704289 0.213644 0.523331 0.429486
+-0.716754 0.167124 0.49412 0.462794
+-0.72615 0.119888 0.462794 0.49412
+-0.732436 0.0721386 0.429486 0.523331
+-0.735586 0.0240807 0.394339 0.5503
+0.735586 0.0240806 -0.238474 -0.633611
+0.732436 0.0721387 -0.196524 -0.647852
+0.72615 0.119888 -0.153731 -0.659318
+0.716754 0.167124 -0.110281 -0.667961
+0.704289 0.213644 -0.0663579 -0.673743
+0.688808 0.259249 -0.0221509 -0.676641
+0.670378 0.303744 0.0221509 -0.676641
+0.649076 0.346939 0.0663579 -0.673743
+0.624996 0.388647 0.110281 -0.667961
+0.598239 0.428692 0.153731 -0.659318
+0.56892 0.466901 0.196524 -0.647852
+0.537165 0.50311 0.238474 -0.633611
+0.50311 0.537165 0.279404 -0.616658
+0.466901 0.56892 0.319137 -0.597064
+0.428692 0.598239 0.357504 -0.574913
+0.388647 0.624996 0.394339 -0.5503
+0.346939 0.649077 0.429486 -0.523331
+0.303744 0.670378 0.462794 -0.49412
+0.259249 0.688808 0.49412 -0.462794
+0.213644 0.704289 0.523331 -0.429486
+0.167124 0.716754 0.5503 -0.394339
+0.119888 0.72615 0.574913 -0.357504
+0.0721386 0.732436 0.597064 -0.319137
+0.0240805 0.735586 0.616658 -0.279404
+-0.0240807 0.735586 0.633611 -0.238474
+-0.0721387 0.732436 0.647852 -0.196524
+-0.119888 0.72615 0.659318 -0.153731
+-0.167124 0.716754 0.667961 -0.110281
+-0.213644 0.704289 0.673743 -0.0663578
+-0.259249 0.688808 0.676641 -0.0221508
+-0.303744 0.670378 0.676641 0.022151
+-0.346939 0.649077 0.673743 0.0663578
+-0.388647 0.624996 0.667961 0.110281
+-0.428692 0.598239 0.659318 0.153731
+-0.466901 0.56892 0.647852 0.196524
+-0.50311 0.537165 0.633611 0.238474
+-0.537165 0.50311 0.616658 0.279404
+-0.56892 0.466901 0.597064 0.319137
+-0.598239 0.428692 0.574913 0.357504
+-0.624996 0.388647 0.5503 0.394339
+-0.649076 0.346939 0.523331 0.429486
+-0.670378 0.303744 0.49412 0.462794
+-0.688808 0.259249 0.462794 0.49412
+-0.704289 0.213644 0.429486 0.523331
+-0.716754 0.167124 0.394339 0.5503
+-0.72615 0.119888 0.357504 0.574913
+-0.732436 0.0721386 0.319137 0.597064
+-0.735586 0.0240807 0.279404 0.616658
+0.763354 0.0249896 -0.167067 -0.623502
+0.760085 0.0748618 -0.12593 -0.633094
+0.753561 0.124413 -0.0842543 -0.639975
+0.743811 0.173432 -0.0422175 -0.644115
+0.730875 0.221709 4.81092e-10 -0.645497
+0.71481 0.269035 0.0422175 -0.644115
+0.695684 0.31521 0.0842543 -0.639975
+0.673578 0.360035 0.12593 -0.633094
+0.648589 0.403318 0.167067 -0.623502
+0.620822 0.444875 0.207488 -0.611241
+0.590396 0.484526 0.247021 -0.596362
+0.557443 0.522102 0.285496 -0.578929
+0.522102 0.557443 0.322749 -0.559017
+0.484526 0.590396 0.358619 -0.536711
+0.444875 0.620822 0.392954 -0.512107
+0.403318 0.648589 0.425606 -0.48531
+0.360035 0.673579 0.456436 -0.456435
+0.31521 0.695684 0.485311 -0.425606
+0.269035 0.71481 0.512107 -0.392954
+0.221709 0.730875 0.536711 -0.358619
+0.173432 0.743811 0.559017 -0.322749
+0.124413 0.753561 0.578929 -0.285496
+0.0748617 0.760085 0.596362 -0.247021
+0.0249895 0.763354 0.611241 -0.207488
+-0.0249897 0.763354 0.623502 -0.167067
+-0.0748619 0.760085 0.633094 -0.12593
+-0.124414 0.753561 0.639975 -0.0842542
+-0.173432 0.743811 0.644115 -0.0422175
+-0.221709 0.730875 0.645497 1.05646e-07
+-0.269036 0.71481 0.644115 0.0422176
+-0.31521 0.695684 0.639975 0.0842544
+-0.360035 0.673579 0.633094 0.12593
+-0.403318 0.648589 0.623502 0.167067
+-0.444875 0.620822 0.611241 0.207488
+-0.484526 0.590397 0.596362 0.247021
+-0.522102 0.557443 0.578929 0.285496
+-0.557443 0.522102 0.559017 0.322749
+-0.590397 0.484526 0.536711 0.358619
+-0.620822 0.444875 0.512107 0.392954
+-0.648589 0.403318 0.485311 0.425606
+-0.673578 0.360035 0.456436 0.456435
+-0.695684 0.31521 0.425606 0.485311
+-0.71481 0.269035 0.392954 0.512107
+-0.730875 0.221709 0.358619 0.536711
+-0.743811 0.173432 0.322749 0.559017
+-0.753561 0.124414 0.285496 0.578929
+-0.760085 0.0748618 0.247021 0.596362
+-0.763354 0.0249897 0.207488 0.611241
+0.763354 0.0249896 -0.285496 -0.578929
+0.760085 0.0748618 -0.247021 -0.596362
+0.753561 0.124413 -0.207488 -0.611241
+0.743811 0.173432 -0.167067 -0.623502
+0.730875 0.221709 -0.12593 -0.633094
+0.71481 0.269035 -0.0842543 -0.639975
+0.695684 0.31521 -0.0422175 -0.644115
+0.673578 0.360035 1.36267e-08 -0.645497
+0.648589 0.403318 0.0422175 -0.644115
+0.620822 0.444875 0.0842543 -0.639975
+0.590396 0.484526 0.12593 -0.633094
+0.557443 0.522102 0.167067 -0.623502
+0.522102 0.557443 0.207488 -0.611241
+0.484526 0.590396 0.247021 -0.596362
+0.444875 0.620822 0.285496 -0.578929
+0.403318 0.648589 0.322749 -0.559017
+0.360035 0.673579 0.358619 -0.536711
+0.31521 0.695684 0.392954 -0.512107
+0.269035 0.71481 0.425606 -0.48531
+0.221709 0.730875 0.456435 -0.456435
+0.173432 0.743811 0.485311 -0.425606
+0.124413 0.753561 0.512107 -0.392954
+0.0748617 0.760085 0.536711 -0.358619
+0.0249895 0.763354 0.559017 -0.322749
+-0.0249897 0.763354 0.578929 -0.285496
+-0.0748619 0.760085 0.596362 -0.247021
+-0.124414 0.753561 0.611241 -0.207488
+-0.173432 0.743811 0.623502 -0.167067
+-0.221709 0.730875 0.633094 -0.12593
+-0.269036 0.71481 0.639975 -0.0842542
+-0.31521 0.695684 0.644115 -0.0422174
+-0.360035 0.673579 0.645497 -7.35816e-08
+-0.403318 0.648589 0.644115 0.0422175
+-0.444875 0.620822 0.639975 0.0842544
+-0.484526 0.590397 0.633094 0.12593
+-0.522102 0.557443 0.623502 0.167067
+-0.557443 0.522102 0.611241 0.207488
+-0.590397 0.484526 0.596362 0.247021
+-0.620822 0.444875 0.578929 0.285496
+-0.648589 0.403318 0.559017 0.322749
+-0.673578 0.360035 0.536711 0.358619
+-0.695684 0.31521 0.512107 0.392954
+-0.71481 0.269035 0.48531 0.425606
+-0.730875 0.221709 0.456435 0.456435
+-0.743811 0.173432 0.425606 0.485311
+-0.753561 0.124414 0.392954 0.512107
+-0.760085 0.0748618 0.358619 0.536711
+-0.763354 0.0249897 0.322749 0.559017
+0.790146 0.0258667 -0.215708 -0.573123
+0.786763 0.0774894 -0.177762 -0.586004
+0.78001 0.12878 -0.139055 -0.596375
+0.769917 0.17952 -0.0997527 -0.604193
+0.756528 0.22949 -0.060023 -0.609424
+0.739899 0.278478 -0.0200363 -0.612045
+0.720101 0.326274 0.0200363 -0.612045
+0.69722 0.372672 0.060023 -0.609424
+0.671353 0.417474 0.0997527 -0.604193
+0.642612 0.460489 0.139055 -0.596375
+0.611118 0.501532 0.177762 -0.586004
+0.577008 0.540427 0.215708 -0.573123
+0.540427 0.577008 0.25273 -0.557788
+0.501532 0.611118 0.28867 -0.540064
+0.460489 0.642612 0.323374 -0.520028
+0.417474 0.671353 0.356693 -0.497765
+0.372672 0.69722 0.388485 -0.47337
+0.326274 0.720101 0.418613 -0.446949
+0.278478 0.739899 0.446949 -0.418613
+0.22949 0.756528 0.47337 -0.388485
+0.17952 0.769917 0.497765 -0.356693
+0.12878 0.78001 0.520028 -0.323374
+0.0774893 0.786763 0.540064 -0.28867
+0.0258666 0.790146 0.557788 -0.25273
+-0.0258668 0.790146 0.573123 -0.215708
+-0.0774894 0.786763 0.586004 -0.177762
+-0.12878 0.78001 0.596375 -0.139055
+-0.17952 0.769917 0.604193 -0.0997527
+-0.22949 0.756528 0.609424 -0.0600229
+-0.278478 0.739899 0.612045 -0.0200362
+-0.326274 0.720101 0.612045 0.0200363
+-0.372672 0.69722 0.609424 0.0600229
+-0.417474 0.671353 0.604193 0.0997527
+-0.460489 0.642612 0.596375 0.139055
+-0.501532 0.611118 0.586004 0.177762
+-0.540427 0.577008 0.573123 0.215708
+-0.577008 0.540427 0.557788 0.25273
+-0.611118 0.501532 0.540064 0.28867
+-0.642612 0.460489 0.520028 0.323374
+-0.671353 0.417474 0.497765 0.356693
+-0.69722 0.372672 0.47337 0.388485
+-0.720101 0.326274 0.446949 0.418613
+-0.739899 0.278478 0.418613 0.446949
+-0.756528 0.22949 0.388485 0.47337
+-0.769917 0.179519 0.356693 0.497765
+-0.78001 0.12878 0.323374 0.520028
+-0.786763 0.0774893 0.28867 0.540064
+-0.790146 0.0258668 0.25273 0.557788
+0.735586 0.0240806 0.0221509 -0.676641
+0.732436 0.0721387 0.0663579 -0.673743
+0.72615 0.119888 0.110281 -0.667961
+0.716754 0.167124 0.153731 -0.659318
+0.704289 0.213644 0.196524 -0.647852
+0.688808 0.259249 0.238474 -0.633611
+0.670378 0.303744 0.279404 -0.616658
+0.649076 0.346939 0.319137 -0.597064
+0.624996 0.388647 0.357504 -0.574913
+0.598239 0.428692 0.394339 -0.5503
+0.56892 0.466901 0.429486 -0.523331
+0.537165 0.50311 0.462794 -0.49412
+0.50311 0.537165 0.49412 -0.462794
+0.466901 0.56892 0.523331 -0.429486
+0.428692 0.598239 0.5503 -0.394339
+0.388647 0.624996 0.574913 -0.357504
+0.346939 0.649077 0.597064 -0.319137
+0.303744 0.670378 0.616658 -0.279404
+0.259249 0.688808 0.633611 -0.238474
+0.213644 0.704289 0.647852 -0.196524
+0.167124 0.716754 0.659318 -0.153731
+0.119888 0.72615 0.667961 -0.110281
+0.0721386 0.732436 0.673743 -0.0663578
+0.0240805 0.735586 0.676641 -0.0221508
+-0.0240807 0.735586 0.676641 0.022151
+-0.0721387 0.732436 0.673743 0.066358
+-0.119888 0.72615 0.667961 0.110281
+-0.167124 0.716754 0.659318 0.153731
+-0.213644 0.704289 0.647852 0.196524
+-0.259249 0.688808 0.633611 0.238475
+-0.303744 0.670378 0.616658 0.279404
+-0.346939 0.649077 0.597064 0.319137
+-0.388647 0.624996 0.574913 0.357504
+-0.428692 0.598239 0.5503 0.394339
+-0.466901 0.56892 0.523331 0.429486
+-0.50311 0.537165 0.49412 0.462794
+-0.537165 0.50311 0.462794 0.49412
+-0.56892 0.466901 0.429486 0.523331
+-0.598239 0.428692 0.394339 0.5503
+-0.624996 0.388647 0.357504 0.574913
+-0.649076 0.346939 0.319137 0.597063
+-0.670378 0.303744 0.279404 0.616658
+-0.688808 0.259249 0.238474 0.633611
+-0.704289 0.213644 0.196524 0.647852
+-0.716754 0.167124 0.153731 0.659318
+-0.72615 0.119888 0.110281 0.667961
+-0.732436 0.0721386 0.0663579 0.673743
+-0.735586 0.0240807 0.022151 0.676641
+0.763354 0.0249896 0.0842543 -0.639975
+0.760085 0.0748618 0.12593 -0.633094
+0.753561 0.124413 0.167067 -0.623502
+0.743811 0.173432 0.207488 -0.611241
+0.730875 0.221709 0.247021 -0.596362
+0.71481 0.269035 0.285496 -0.578929
+0.695684 0.31521 0.322749 -0.559017
+0.673578 0.360035 0.358619 -0.536711
+0.648589 0.403318 0.392954 -0.512107
+0.620822 0.444875 0.425606 -0.485311
+0.590396 0.484526 0.456435 -0.456435
+0.557443 0.522102 0.485311 -0.425606
+0.522102 0.557443 0.512107 -0.392954
+0.484526 0.590396 0.536711 -0.358619
+0.444875 0.620822 0.559017 -0.322749
+0.403318 0.648589 0.578929 -0.285496
+0.360035 0.673579 0.596362 -0.247021
+0.31521 0.695684 0.611241 -0.207488
+0.269035 0.71481 0.623502 -0.167067
+0.221709 0.730875 0.633094 -0.12593
+0.173432 0.743811 0.639975 -0.0842543
+0.124413 0.753561 0.644115 -0.0422175
+0.0748617 0.760085 0.645497 7.93547e-08
+0.0249895 0.763354 0.644115 0.0422176
+-0.0249897 0.763354 0.639975 0.0842544
+-0.0748619 0.760085 0.633094 0.12593
+-0.124414 0.753561 0.623502 0.167067
+-0.173432 0.743811 0.611241 0.207488
+-0.221709 0.730875 0.596362 0.247021
+-0.269036 0.71481 0.578929 0.285496
+-0.31521 0.695684 0.559017 0.322749
+-0.360035 0.673579 0.536711 0.358619
+-0.403318 0.648589 0.512107 0.392954
+-0.444875 0.620822 0.48531 0.425606
+-0.484526 0.590397 0.456435 0.456435
+-0.522102 0.557443 0.425606 0.485311
+-0.557443 0.522102 0.392954 0.512107
+-0.590397 0.484526 0.358619 0.536711
+-0.620822 0.444875 0.322749 0.559017
+-0.648589 0.403318 0.285496 0.578929
+-0.673578 0.360035 0.247021 0.596362
+-0.695684 0.31521 0.207488 0.611241
+-0.71481 0.269035 0.167067 0.623502
+-0.730875 0.221709 0.12593 0.633094
+-0.743811 0.173432 0.0842542 0.639975
+-0.753561 0.124414 0.0422176 0.644115
+-0.760085 0.0748618 -3.0621e-08 0.645497
+-0.763354 0.0249897 -0.0422175 0.644115
+0.763354 0.0249896 -0.0422175 -0.644115
+0.760085 0.0748618 1.76347e-09 -0.645497
+0.753561 0.124413 0.0422175 -0.644115
+0.743811 0.173432 0.0842543 -0.639975
+0.730875 0.221709 0.12593 -0.633094
+0.71481 0.269035 0.167067 -0.623502
+0.695684 0.31521 0.207488 -0.611241
+0.673578 0.360035 0.247021 -0.596362
+0.648589 0.403318 0.285496 -0.578929
+0.620822 0.444875 0.322749 -0.559017
+0.590396 0.484526 0.358619 -0.536711
+0.557443 0.522102 0.392954 -0.512107
+0.522102 0.557443 0.425606 -0.485311
+0.484526 0.590396 0.456435 -0.456435
+0.444875 0.620822 0.485311 -0.425606
+0.403318 0.648589 0.512107 -0.392954
+0.360035 0.673579 0.536711 -0.358619
+0.31521 0.695684 0.559017 -0.322749
+0.269035 0.71481 0.578929 -0.285496
+0.221709 0.730875 0.596362 -0.247021
+0.173432 0.743811 0.611241 -0.207488
+0.124413 0.753561 0.623502 -0.167067
+0.0748617 0.760085 0.633094 -0.12593
+0.0249895 0.763354 0.639975 -0.0842542
+-0.0249897 0.763354 0.644115 -0.0422175
+-0.0748619 0.760085 0.645497 5.40257e-08
+-0.124414 0.753561 0.644115 0.0422176
+-0.173432 0.743811 0.639975 0.0842543
+-0.221709 0.730875 0.633094 0.12593
+-0.269036 0.71481 0.623502 0.167067
+-0.31521 0.695684 0.611241 0.207488
+-0.360035 0.673579 0.596362 0.247021
+-0.403318 0.648589 0.578929 0.285496
+-0.444875 0.620822 0.559017 0.322749
+-0.484526 0.590397 0.536711 0.358619
+-0.522102 0.557443 0.512107 0.392954
+-0.557443 0.522102 0.485311 0.425606
+-0.590397 0.484526 0.456435 0.456435
+-0.620822 0.444875 0.425606 0.48531
+-0.648589 0.403318 0.392954 0.512107
+-0.673578 0.360035 0.358619 0.536711
+-0.695684 0.31521 0.322749 0.559017
+-0.71481 0.269035 0.285496 0.578929
+-0.730875 0.221709 0.247021 0.596362
+-0.743811 0.173432 0.207488 0.611241
+-0.753561 0.124414 0.167067 0.623502
+-0.760085 0.0748618 0.12593 0.633094
+-0.763354 0.0249897 0.0842543 0.639975
+0.790146 0.0258667 0.0200363 -0.612045
+0.786763 0.0774894 0.060023 -0.609424
+0.78001 0.12878 0.0997527 -0.604193
+0.769917 0.17952 0.139055 -0.596375
+0.756528 0.22949 0.177762 -0.586004
+0.739899 0.278478 0.215708 -0.573123
+0.720101 0.326274 0.25273 -0.557788
+0.69722 0.372672 0.28867 -0.540064
+0.671353 0.417474 0.323374 -0.520028
+0.642612 0.460489 0.356693 -0.497765
+0.611118 0.501532 0.388485 -0.47337
+0.577008 0.540427 0.418613 -0.446949
+0.540427 0.577008 0.446949 -0.418613
+0.501532 0.611118 0.47337 -0.388485
+0.460489 0.642612 0.497765 -0.356693
+0.417474 0.671353 0.520028 -0.323374
+0.372672 0.69722 0.540064 -0.28867
+0.326274 0.720101 0.557788 -0.25273
+0.278478 0.739899 0.573123 -0.215708
+0.22949 0.756528 0.586004 -0.177762
+0.17952 0.769917 0.596375 -0.139055
+0.12878 0.78001 0.604193 -0.0997526
+0.0774893 0.786763 0.609424 -0.0600229
+0.0258666 0.790146 0.612045 -0.0200362
+-0.0258668 0.790146 0.612045 0.0200363
+-0.0774894 0.786763 0.609424 0.060023
+-0.12878 0.78001 0.604193 0.0997527
+-0.17952 0.769917 0.596375 0.139055
+-0.22949 0.756528 0.586004 0.177762
+-0.278478 0.739899 0.573123 0.215708
+-0.326274 0.720101 0.557788 0.25273
+-0.372672 0.69722 0.540064 0.28867
+-0.417474 0.671353 0.520028 0.323374
+-0.460489 0.642612 0.497765 0.356693
+-0.501532 0.611118 0.47337 0.388485
+-0.540427 0.577008 0.446949 0.418613
+-0.577008 0.540427 0.418613 0.446949
+-0.611118 0.501532 0.388485 0.47337
+-0.642612 0.460489 0.356693 0.497765
+-0.671353 0.417474 0.323374 0.520028
+-0.69722 0.372672 0.28867 0.540064
+-0.720101 0.326274 0.25273 0.557788
+-0.739899 0.278478 0.215708 0.573123
+-0.756528 0.22949 0.177762 0.586004
+-0.769917 0.179519 0.139055 0.596375
+-0.78001 0.12878 0.0997527 0.604193
+-0.786763 0.0774893 0.060023 0.609424
+-0.790146 0.0258668 0.0200363 0.612045
+0.790146 0.0258667 0.139055 -0.596375
+0.786763 0.0774894 0.177762 -0.586004
+0.78001 0.12878 0.215708 -0.573123
+0.769917 0.17952 0.25273 -0.557788
+0.756528 0.22949 0.28867 -0.540064
+0.739899 0.278478 0.323374 -0.520028
+0.720101 0.326274 0.356693 -0.497765
+0.69722 0.372672 0.388485 -0.47337
+0.671353 0.417474 0.418613 -0.446949
+0.642612 0.460489 0.446949 -0.418613
+0.611118 0.501532 0.47337 -0.388485
+0.577008 0.540427 0.497765 -0.356693
+0.540427 0.577008 0.520028 -0.323374
+0.501532 0.611118 0.540064 -0.28867
+0.460489 0.642612 0.557788 -0.25273
+0.417474 0.671353 0.573123 -0.215708
+0.372672 0.69722 0.586004 -0.177762
+0.326274 0.720101 0.596375 -0.139055
+0.278478 0.739899 0.604193 -0.0997527
+0.22949 0.756528 0.609424 -0.060023
+0.17952 0.769917 0.612045 -0.0200362
+0.12878 0.78001 0.612045 0.0200363
+0.0774893 0.786763 0.609424 0.0600231
+0.0258666 0.790146 0.604193 0.0997528
+-0.0258668 0.790146 0.596375 0.139055
+-0.0774894 0.786763 0.586004 0.177762
+-0.12878 0.78001 0.573123 0.215708
+-0.17952 0.769917 0.557788 0.25273
+-0.22949 0.756528 0.540064 0.28867
+-0.278478 0.739899 0.520028 0.323374
+-0.326274 0.720101 0.497765 0.356693
+-0.372672 0.69722 0.47337 0.388485
+-0.417474 0.671353 0.446949 0.418613
+-0.460489 0.642612 0.418613 0.446949
+-0.501532 0.611118 0.388485 0.47337
+-0.540427 0.577008 0.356693 0.497765
+-0.577008 0.540427 0.323374 0.520028
+-0.611118 0.501532 0.28867 0.540064
+-0.642612 0.460489 0.25273 0.557788
+-0.671353 0.417474 0.215708 0.573123
+-0.69722 0.372672 0.177762 0.586004
+-0.720101 0.326274 0.139055 0.596375
+-0.739899 0.278478 0.0997526 0.604193
+-0.756528 0.22949 0.060023 0.609424
+-0.769917 0.179519 0.0200362 0.612045
+-0.78001 0.12878 -0.0200362 0.612045
+-0.786763 0.0774893 -0.060023 0.609424
+-0.790146 0.0258668 -0.0997526 0.604193
+0.816059 0.026715 0.185583 -0.54671
+0.812565 0.0800307 0.220942 -0.533402
+0.805591 0.133004 0.255355 -0.51781
+0.795167 0.185407 0.288675 -0.5
+0.781339 0.237016 0.320759 -0.480049
+0.764164 0.287611 0.351469 -0.458043
+0.743717 0.336974 0.380673 -0.434075
+0.720086 0.384894 0.408248 -0.408248
+0.693371 0.431166 0.434075 -0.380673
+0.663687 0.475591 0.458043 -0.351469
+0.63116 0.51798 0.480049 -0.320759
+0.595932 0.558151 0.5 -0.288675
+0.558151 0.595932 0.51781 -0.255355
+0.51798 0.63116 0.533402 -0.220942
+0.475591 0.663687 0.54671 -0.185583
+0.431166 0.693371 0.557678 -0.149429
+0.384894 0.720086 0.566257 -0.112635
+0.336974 0.743717 0.572411 -0.0753593
+0.287611 0.764164 0.576114 -0.0377605
+0.237016 0.781339 0.57735 2.48065e-08
+0.185407 0.795167 0.576114 0.0377605
+0.133003 0.805591 0.572411 0.0753594
+0.0800306 0.812565 0.566257 0.112636
+0.0267149 0.816059 0.557678 0.149429
+-0.0267151 0.816059 0.54671 0.185583
+-0.0800307 0.812565 0.533402 0.220942
+-0.133004 0.805591 0.51781 0.255356
+-0.185407 0.795167 0.5 0.288675
+-0.237017 0.781338 0.480049 0.320759
+-0.287611 0.764164 0.458043 0.351469
+-0.336974 0.743717 0.434075 0.380674
+-0.384894 0.720086 0.408248 0.408248
+-0.431166 0.693371 0.380673 0.434075
+-0.475591 0.663686 0.351469 0.458043
+-0.51798 0.63116 0.320759 0.480049
+-0.558151 0.595931 0.288675 0.5
+-0.595931 0.558151 0.255356 0.51781
+-0.63116 0.51798 0.220942 0.533402
+-0.663686 0.475591 0.185583 0.54671
+-0.693371 0.431166 0.149429 0.557678
+-0.720086 0.384894 0.112636 0.566257
+-0.743717 0.336974 0.0753593 0.572411
+-0.764164 0.287611 0.0377605 0.576114
+-0.781339 0.237016 1.87823e-08 0.57735
+-0.795167 0.185407 -0.0377606 0.576114
+-0.805591 0.133004 -0.0753593 0.572411
+-0.812565 0.0800306 -0.112635 0.566257
+-0.816059 0.0267151 -0.149429 0.557678
+0.816059 0.026715 0.0753593 -0.572411
+0.812565 0.0800307 0.112635 -0.566257
+0.805591 0.133004 0.149429 -0.557678
+0.795167 0.185407 0.185583 -0.54671
+0.781339 0.237016 0.220942 -0.533402
+0.764164 0.287611 0.255355 -0.51781
+0.743717 0.336974 0.288675 -0.5
+0.720086 0.384894 0.320759 -0.480049
+0.693371 0.431166 0.351469 -0.458043
+0.663687 0.475591 0.380673 -0.434075
+0.63116 0.51798 0.408248 -0.408248
+0.595932 0.558151 0.434075 -0.380673
+0.558151 0.595932 0.458043 -0.351469
+0.51798 0.63116 0.480049 -0.320759
+0.475591 0.663687 0.5 -0.288675
+0.431166 0.693371 0.51781 -0.255355
+0.384894 0.720086 0.533402 -0.220942
+0.336974 0.743717 0.54671 -0.185583
+0.287611 0.764164 0.557678 -0.149429
+0.237016 0.781339 0.566257 -0.112635
+0.185407 0.795167 0.572411 -0.0753593
+0.133003 0.805591 0.576114 -0.0377604
+0.0800306 0.812565 0.57735 7.0977e-08
+0.0267149 0.816059 0.576114 0.0377606
+-0.0267151 0.816059 0.572411 0.0753594
+-0.0800307 0.812565 0.566257 0.112635
+-0.133004 0.805591 0.557678 0.149429
+-0.185407 0.795167 0.54671 0.185583
+-0.237017 0.781338 0.533402 0.220942
+-0.287611 0.764164 0.51781 0.255356
+-0.336974 0.743717 0.5 0.288675
+-0.384894 0.720086 0.480049 0.320759
+-0.431166 0.693371 0.458043 0.351469
+-0.475591 0.663686 0.434075 0.380674
+-0.51798 0.63116 0.408248 0.408248
+-0.558151 0.595931 0.380673 0.434075
+-0.595931 0.558151 0.351469 0.458043
+-0.63116 0.51798 0.320759 0.480049
+-0.663686 0.475591 0.288675 0.5
+-0.693371 0.431166 0.255355 0.51781
+-0.720086 0.384894 0.220942 0.533402
+-0.743717 0.336974 0.185583 0.54671
+-0.764164 0.287611 0.149429 0.557678
+-0.781339 0.237016 0.112635 0.566257
+-0.795167 0.185407 0.0753593 0.572411
+-0.805591 0.133004 0.0377605 0.576114
+-0.812565 0.0800306 -2.73883e-08 0.57735
+-0.816059 0.0267151 -0.0377605 0.576114
+0.841175 0.0275372 0.122635 -0.525954
+0.837573 0.0824937 0.156772 -0.516807
+0.830384 0.137097 0.190237 -0.505447
+0.81964 0.191113 0.222887 -0.491923
+0.805385 0.244311 0.254583 -0.476292
+0.787682 0.296463 0.285189 -0.458622
+0.766606 0.347345 0.314574 -0.438987
+0.742247 0.396739 0.342612 -0.417473
+0.71471 0.444435 0.369182 -0.394172
+0.684112 0.490228 0.394172 -0.369182
+0.650585 0.533921 0.417473 -0.342612
+0.614272 0.575329 0.438987 -0.314574
+0.575329 0.614272 0.458622 -0.285189
+0.533922 0.650585 0.476292 -0.254583
+0.490228 0.684112 0.491923 -0.222887
+0.444435 0.71471 0.505447 -0.190237
+0.396739 0.742247 0.516807 -0.156772
+0.347345 0.766606 0.525954 -0.122635
+0.296463 0.787682 0.532848 -0.0879736
+0.244311 0.805385 0.537461 -0.0529353
+0.191113 0.81964 0.539773 -0.0176703
+0.137097 0.830384 0.539773 0.0176704
+0.0824936 0.837573 0.537461 0.0529354
+0.0275371 0.841175 0.532848 0.0879737
+-0.0275373 0.841175 0.525954 0.122635
+-0.0824938 0.837573 0.516807 0.156772
+-0.137097 0.830384 0.505447 0.190237
+-0.191113 0.81964 0.491923 0.222887
+-0.244311 0.805385 0.476292 0.254583
+-0.296463 0.787682 0.458622 0.285189
+-0.347345 0.766606 0.438987 0.314574
+-0.396739 0.742247 0.417473 0.342611
+-0.444435 0.71471 0.394172 0.369182
+-0.490228 0.684112 0.369182 0.394172
+-0.533921 0.650585 0.342612 0.417473
+-0.575329 0.614272 0.314574 0.438987
+-0.614272 0.575329 0.285189 0.458622
+-0.650585 0.533921 0.254583 0.476292
+-0.684112 0.490228 0.222887 0.491923
+-0.71471 0.444435 0.190237 0.505447
+-0.742247 0.39674 0.156772 0.516807
+-0.766606 0.347345 0.122635 0.525954
+-0.787682 0.296463 0.0879736 0.532848
+-0.805385 0.244311 0.0529353 0.537461
+-0.81964 0.191113 0.0176703 0.539773
+-0.830384 0.137097 -0.0176703 0.539773
+-0.837573 0.0824937 -0.0529353 0.537461
+-0.841175 0.0275373 -0.0879736 0.532848
+0.790146 0.0258667 -0.0997527 -0.604193
+0.786763 0.0774894 -0.060023 -0.609424
+0.78001 0.12878 -0.0200363 -0.612045
+0.769917 0.17952 0.0200363 -0.612045
+0.756528 0.22949 0.060023 -0.609424
+0.739899 0.278478 0.0997527 -0.604193
+0.720101 0.326274 0.139055 -0.596375
+0.69722 0.372672 0.177762 -0.586004
+0.671353 0.417474 0.215708 -0.573123
+0.642612 0.460489 0.25273 -0.557788
+0.611118 0.501532 0.28867 -0.540064
+0.577008 0.540427 0.323374 -0.520028
+0.540427 0.577008 0.356693 -0.497765
+0.501532 0.611118 0.388485 -0.47337
+0.460489 0.642612 0.418613 -0.446949
+0.417474 0.671353 0.446949 -0.418613
+0.372672 0.69722 0.47337 -0.388485
+0.326274 0.720101 0.497765 -0.356693
+0.278478 0.739899 0.520028 -0.323374
+0.22949 0.756528 0.540064 -0.28867
+0.17952 0.769917 0.557788 -0.25273
+0.12878 0.78001 0.573123 -0.215708
+0.0774893 0.786763 0.586004 -0.177762
+0.0258666 0.790146 0.596375 -0.139055
+-0.0258668 0.790146 0.604193 -0.0997526
+-0.0774894 0.786763 0.609424 -0.0600229
+-0.12878 0.78001 0.612045 -0.0200362
+-0.17952 0.769917 0.612045 0.0200363
+-0.22949 0.756528 0.609424 0.0600231
+-0.278478 0.739899 0.604193 0.0997528
+-0.326274 0.720101 0.596375 0.139055
+-0.372672 0.69722 0.586004 0.177762
+-0.417474 0.671353 0.573123 0.215708
+-0.460489 0.642612 0.557788 0.25273
+-0.501532 0.611118 0.540064 0.28867
+-0.540427 0.577008 0.520028 0.323374
+-0.577008 0.540427 0.497765 0.356693
+-0.611118 0.501532 0.47337 0.388485
+-0.642612 0.460489 0.446949 0.418613
+-0.671353 0.417474 0.418613 0.446949
+-0.69722 0.372672 0.388485 0.47337
+-0.720101 0.326274 0.356693 0.497765
+-0.739899 0.278478 0.323374 0.520028
+-0.756528 0.22949 0.28867 0.540064
+-0.769917 0.179519 0.25273 0.557788
+-0.78001 0.12878 0.215708 0.573123
+-0.786763 0.0774893 0.177762 0.586004
+-0.790146 0.0258668 0.139055 0.596375
+0.816059 0.026715 -0.0377605 -0.576114
+0.812565 0.0800307 1.5773e-09 -0.57735
+0.805591 0.133004 0.0377605 -0.576114
+0.795167 0.185407 0.0753593 -0.572411
+0.781339 0.237016 0.112635 -0.566257
+0.764164 0.287611 0.149429 -0.557678
+0.743717 0.336974 0.185583 -0.54671
+0.720086 0.384894 0.220942 -0.533402
+0.693371 0.431166 0.255355 -0.51781
+0.663687 0.475591 0.288675 -0.5
+0.63116 0.51798 0.320759 -0.480049
+0.595932 0.558151 0.351469 -0.458043
+0.558151 0.595932 0.380673 -0.434075
+0.51798 0.63116 0.408248 -0.408248
+0.475591 0.663687 0.434075 -0.380673
+0.431166 0.693371 0.458043 -0.351469
+0.384894 0.720086 0.480049 -0.320759
+0.336974 0.743717 0.5 -0.288675
+0.287611 0.764164 0.51781 -0.255355
+0.237016 0.781339 0.533402 -0.220942
+0.185407 0.795167 0.54671 -0.185583
+0.133003 0.805591 0.557678 -0.149429
+0.0800306 0.812565 0.566257 -0.112635
+0.0267149 0.816059 0.572411 -0.0753593
+-0.0267151 0.816059 0.576114 -0.0377605
+-0.0800307 0.812565 0.57735 4.83221e-08
+-0.133004 0.805591 0.576114 0.0377606
+-0.185407 0.795167 0.572411 0.0753594
+-0.237017 0.781338 0.566257 0.112636
+-0.287611 0.764164 0.557678 0.149429
+-0.336974 0.743717 0.54671 0.185583
+-0.384894 0.720086 0.533402 0.220942
+-0.431166 0.693371 0.51781 0.255355
+-0.475591 0.663686 0.5 0.288675
+-0.51798 0.63116 0.480049 0.320759
+-0.558151 0.595931 0.458043 0.351469
+-0.595931 0.558151 0.434075 0.380673
+-0.63116 0.51798 0.408248 0.408248
+-0.663686 0.475591 0.380674 0.434075
+-0.693371 0.431166 0.351469 0.458043
+-0.720086 0.384894 0.320759 0.480049
+-0.743717 0.336974 0.288675 0.5
+-0.764164 0.287611 0.255355 0.51781
+-0.781339 0.237016 0.220942 0.533402
+-0.795167 0.185407 0.185583 0.54671
+-0.805591 0.133004 0.149429 0.557678
+-0.812565 0.0800306 0.112635 0.566257
+-0.816059 0.0267151 0.0753594 0.572411
+0.816059 0.026715 -0.149429 -0.557678
+0.812565 0.0800307 -0.112635 -0.566257
+0.805591 0.133004 -0.0753593 -0.572411
+0.795167 0.185407 -0.0377605 -0.576114
+0.781339 0.237016 4.30302e-10 -0.57735
+0.764164 0.287611 0.0377605 -0.576114
+0.743717 0.336974 0.0753593 -0.572411
+0.720086 0.384894 0.112635 -0.566257
+0.693371 0.431166 0.149429 -0.557678
+0.663687 0.475591 0.185583 -0.54671
+0.63116 0.51798 0.220942 -0.533402
+0.595932 0.558151 0.255356 -0.51781
+0.558151 0.595932 0.288675 -0.5
+0.51798 0.63116 0.320759 -0.480049
+0.475591 0.663687 0.351469 -0.458043
+0.431166 0.693371 0.380674 -0.434075
+0.384894 0.720086 0.408248 -0.408248
+0.336974 0.743717 0.434075 -0.380673
+0.287611 0.764164 0.458043 -0.351469
+0.237016 0.781339 0.480049 -0.320759
+0.185407 0.795167 0.5 -0.288675
+0.133003 0.805591 0.51781 -0.255355
+0.0800306 0.812565 0.533402 -0.220942
+0.0267149 0.816059 0.54671 -0.185583
+-0.0267151 0.816059 0.557678 -0.149429
+-0.0800307 0.812565 0.566257 -0.112635
+-0.133004 0.805591 0.572411 -0.0753593
+-0.185407 0.795167 0.576114 -0.0377605
+-0.237017 0.781338 0.57735 9.44926e-08
+-0.287611 0.764164 0.576114 0.0377606
+-0.336974 0.743717 0.572411 0.0753594
+-0.384894 0.720086 0.566257 0.112635
+-0.431166 0.693371 0.557678 0.149429
+-0.475591 0.663686 0.54671 0.185583
+-0.51798 0.63116 0.533402 0.220942
+-0.558151 0.595931 0.51781 0.255356
+-0.595931 0.558151 0.5 0.288675
+-0.63116 0.51798 0.480049 0.320759
+-0.663686 0.475591 0.458043 0.351469
+-0.693371 0.431166 0.434075 0.380673
+-0.720086 0.384894 0.408248 0.408248
+-0.743717 0.336974 0.380673 0.434075
+-0.764164 0.287611 0.351469 0.458043
+-0.781339 0.237016 0.320759 0.480049
+-0.795167 0.185407 0.288675 0.5
+-0.805591 0.133004 0.255356 0.51781
+-0.812565 0.0800306 0.220942 0.533402
+-0.816059 0.0267151 0.185583 0.54671
+0.841175 0.0275372 -0.0879736 -0.532848
+0.837573 0.0824937 -0.0529353 -0.537461
+0.830384 0.137097 -0.0176703 -0.539773
+0.81964 0.191113 0.0176703 -0.539773
+0.805385 0.244311 0.0529353 -0.537461
+0.787682 0.296463 0.0879736 -0.532848
+0.766606 0.347345 0.122635 -0.525954
+0.742247 0.396739 0.156772 -0.516807
+0.71471 0.444435 0.190237 -0.505447
+0.684112 0.490228 0.222887 -0.491923
+0.650585 0.533921 0.254583 -0.476292
+0.614272 0.575329 0.285189 -0.458622
+0.575329 0.614272 0.314574 -0.438987
+0.533922 0.650585 0.342612 -0.417473
+0.490228 0.684112 0.369182 -0.394172
+0.444435 0.71471 0.394172 -0.369182
+0.396739 0.742247 0.417473 -0.342611
+0.347345 0.766606 0.438987 -0.314574
+0.296463 0.787682 0.458622 -0.285189
+0.244311 0.805385 0.476292 -0.254583
+0.191113 0.81964 0.491923 -0.222887
+0.137097 0.830384 0.505447 -0.190237
+0.0824936 0.837573 0.516807 -0.156772
+0.0275371 0.841175 0.525954 -0.122635
+-0.0275373 0.841175 0.532848 -0.0879736
+-0.0824938 0.837573 0.537461 -0.0529353
+-0.137097 0.830384 0.539773 -0.0176703
+-0.191113 0.81964 0.539773 0.0176704
+-0.244311 0.805385 0.537461 0.0529354
+-0.296463 0.787682 0.532848 0.0879737
+-0.347345 0.766606 0.525954 0.122635
+-0.396739 0.742247 0.516807 0.156772
+-0.444435 0.71471 0.505447 0.190237
+-0.490228 0.684112 0.491923 0.222887
+-0.533921 0.650585 0.476292 0.254583
+-0.575329 0.614272 0.458622 0.285189
+-0.614272 0.575329 0.438987 0.314574
+-0.650585 0.533921 0.417473 0.342612
+-0.684112 0.490228 0.394172 0.369182
+-0.71471 0.444435 0.369182 0.394172
+-0.742247 0.39674 0.342612 0.417473
+-0.766606 0.347345 0.314574 0.438987
+-0.787682 0.296463 0.285189 0.458622
+-0.805385 0.244311 0.254583 0.476292
+-0.81964 0.191113 0.222887 0.491923
+-0.830384 0.137097 0.190237 0.505447
+-0.837573 0.0824937 0.156772 0.516807
+-0.841175 0.0275373 0.122635 0.525954
+0.841175 0.0275372 0.0176703 -0.539773
+0.837573 0.0824937 0.0529353 -0.537461
+0.830384 0.137097 0.0879736 -0.532848
+0.81964 0.191113 0.122635 -0.525954
+0.805385 0.244311 0.156772 -0.516807
+0.787682 0.296463 0.190237 -0.505447
+0.766606 0.347345 0.222887 -0.491923
+0.742247 0.396739 0.254583 -0.476292
+0.71471 0.444435 0.285189 -0.458622
+0.684112 0.490228 0.314574 -0.438987
+0.650585 0.533921 0.342612 -0.417473
+0.614272 0.575329 0.369182 -0.394172
+0.575329 0.614272 0.394172 -0.369182
+0.533922 0.650585 0.417473 -0.342612
+0.490228 0.684112 0.438987 -0.314574
+0.444435 0.71471 0.458622 -0.285189
+0.396739 0.742247 0.476292 -0.254583
+0.347345 0.766606 0.491923 -0.222887
+0.296463 0.787682 0.505447 -0.190237
+0.244311 0.805385 0.516807 -0.156772
+0.191113 0.81964 0.525954 -0.122635
+0.137097 0.830384 0.532848 -0.0879735
+0.0824936 0.837573 0.537461 -0.0529352
+0.0275371 0.841175 0.539773 -0.0176703
+-0.0275373 0.841175 0.539773 0.0176704
+-0.0824938 0.837573 0.537461 0.0529354
+-0.137097 0.830384 0.532848 0.0879736
+-0.191113 0.81964 0.525954 0.122635
+-0.244311 0.805385 0.516807 0.156772
+-0.296463 0.787682 0.505447 0.190237
+-0.347345 0.766606 0.491923 0.222887
+-0.396739 0.742247 0.476292 0.254583
+-0.444435 0.71471 0.458622 0.285189
+-0.490228 0.684112 0.438987 0.314574
+-0.533921 0.650585 0.417473 0.342612
+-0.575329 0.614272 0.394172 0.369182
+-0.614272 0.575329 0.369182 0.394172
+-0.650585 0.533921 0.342612 0.417473
+-0.684112 0.490228 0.314574 0.438987
+-0.71471 0.444435 0.285189 0.458622
+-0.742247 0.39674 0.254583 0.476292
+-0.766606 0.347345 0.222887 0.491923
+-0.787682 0.296463 0.190237 0.505447
+-0.805385 0.244311 0.156772 0.516807
+-0.81964 0.191113 0.122635 0.525954
+-0.830384 0.137097 0.0879736 0.532848
+-0.837573 0.0824937 0.0529353 0.537461
+-0.841175 0.0275373 0.0176704 0.539773
+0.865562 0.0283356 0.0652631 -0.495722
+0.861855 0.0848853 0.0975452 -0.490393
+0.854458 0.141072 0.12941 -0.482963
+0.843402 0.196654 0.16072 -0.473465
+0.828735 0.251394 0.191342 -0.46194
+0.810518 0.305057 0.221144 -0.448436
+0.788831 0.357415 0.25 -0.433013
+0.763766 0.408242 0.277785 -0.415735
+0.735431 0.45732 0.304381 -0.396677
+0.703946 0.50444 0.329673 -0.37592
+0.669447 0.549401 0.353553 -0.353553
+0.632081 0.592008 0.37592 -0.329673
+0.592008 0.632081 0.396677 -0.304381
+0.549401 0.669447 0.415735 -0.277785
+0.50444 0.703946 0.433013 -0.25
+0.45732 0.735431 0.448436 -0.221144
+0.408241 0.763766 0.46194 -0.191342
+0.357415 0.788831 0.473465 -0.16072
+0.305057 0.810518 0.482963 -0.129409
+0.251394 0.828735 0.490393 -0.0975451
+0.196654 0.843402 0.495722 -0.0652631
+0.141072 0.854458 0.498929 -0.0327015
+0.0848852 0.861855 0.5 6.14679e-08
+0.0283355 0.865562 0.498929 0.0327016
+-0.0283356 0.865562 0.495722 0.0652631
+-0.0848854 0.861855 0.490393 0.0975452
+-0.141072 0.854458 0.482963 0.12941
+-0.196654 0.843402 0.473465 0.16072
+-0.251394 0.828735 0.46194 0.191342
+-0.305058 0.810518 0.448436 0.221144
+-0.357415 0.788831 0.433013 0.25
+-0.408241 0.763766 0.415735 0.277785
+-0.45732 0.735431 0.396677 0.304381
+-0.504441 0.703946 0.37592 0.329673
+-0.549401 0.669447 0.353553 0.353553
+-0.592008 0.632081 0.329673 0.37592
+-0.632081 0.592008 0.304381 0.396677
+-0.669447 0.549401 0.277785 0.415735
+-0.703946 0.504441 0.25 0.433013
+-0.735431 0.45732 0.221144 0.448436
+-0.763766 0.408242 0.191342 0.46194
+-0.788831 0.357415 0.16072 0.473465
+-0.810518 0.305057 0.129409 0.482963
+-0.828735 0.251394 0.0975452 0.490393
+-0.843402 0.196654 0.0652631 0.495722
+-0.854458 0.141072 0.0327016 0.498929
+-0.861855 0.0848853 -2.3719e-08 0.5
+-0.865562 0.0283356 -0.0327015 0.498929
+0.865562 0.0283356 -0.0327016 -0.498929
+0.861855 0.0848853 1.36598e-09 -0.5
+0.854458 0.141072 0.0327016 -0.498929
+0.843402 0.196654 0.0652631 -0.495722
+0.828735 0.251394 0.0975452 -0.490393
+0.810518 0.305057 0.12941 -0.482963
+0.788831 0.357415 0.16072 -0.473465
+0.763766 0.408242 0.191342 -0.46194
+0.735431 0.45732 0.221144 -0.448436
+0.703946 0.50444 0.25 -0.433013
+0.669447 0.549401 0.277785 -0.415735
+0.632081 0.592008 0.304381 -0.396677
+0.592008 0.632081 0.329673 -0.37592
+0.549401 0.669447 0.353553 -0.353553
+0.50444 0.703946 0.37592 -0.329673
+0.45732 0.735431 0.396677 -0.304381
+0.408241 0.763766 0.415735 -0.277785
+0.357415 0.788831 0.433013 -0.25
+0.305057 0.810518 0.448436 -0.221144
+0.251394 0.828735 0.46194 -0.191342
+0.196654 0.843402 0.473465 -0.16072
+0.141072 0.854458 0.482963 -0.129409
+0.0848852 0.861855 0.490393 -0.0975451
+0.0283355 0.865562 0.495722 -0.065263
+-0.0283356 0.865562 0.498929 -0.0327015
+-0.0848854 0.861855 0.5 4.18481e-08
+-0.141072 0.854458 0.498929 0.0327016
+-0.196654 0.843402 0.495722 0.0652631
+-0.251394 0.828735 0.490393 0.0975452
+-0.305058 0.810518 0.482963 0.12941
+-0.357415 0.788831 0.473465 0.16072
+-0.408241 0.763766 0.46194 0.191342
+-0.45732 0.735431 0.448436 0.221144
+-0.504441 0.703946 0.433013 0.25
+-0.549401 0.669447 0.415735 0.277785
+-0.592008 0.632081 0.396677 0.304381
+-0.632081 0.592008 0.37592 0.329673
+-0.669447 0.549401 0.353553 0.353553
+-0.703946 0.504441 0.329673 0.37592
+-0.735431 0.45732 0.304381 0.396677
+-0.763766 0.408242 0.277785 0.415735
+-0.788831 0.357415 0.25 0.433013
+-0.810518 0.305057 0.221144 0.448436
+-0.828735 0.251394 0.191342 0.46194
+-0.843402 0.196654 0.16072 0.473465
+-0.854458 0.141072 0.12941 0.482963
+-0.861855 0.0848853 0.0975451 0.490393
+-0.865562 0.0283356 0.0652631 0.495722
+0.88928 0.029112 0.0149342 -0.456191
+0.885472 0.0872114 0.0447385 -0.454238
+0.877872 0.144937 0.0743513 -0.450339
+0.866513 0.202043 0.103646 -0.444512
+0.851444 0.258283 0.132496 -0.436782
+0.832728 0.313417 0.160779 -0.427181
+0.810447 0.367209 0.188374 -0.415751
+0.784695 0.419428 0.215162 -0.40254
+0.755583 0.469852 0.241029 -0.387606
+0.723236 0.518263 0.265863 -0.371012
+0.687791 0.564456 0.28956 -0.352829
+0.649401 0.608231 0.312016 -0.333136
+0.608231 0.649401 0.333136 -0.312016
+0.564456 0.687791 0.352829 -0.28956
+0.518263 0.723236 0.371012 -0.265863
+0.469852 0.755583 0.387606 -0.241029
+0.419428 0.784695 0.40254 -0.215162
+0.367209 0.810447 0.415751 -0.188374
+0.313417 0.832728 0.427181 -0.160779
+0.258283 0.851444 0.436782 -0.132496
+0.202043 0.866513 0.444512 -0.103646
+0.144937 0.877872 0.450339 -0.0743512
+0.0872113 0.885472 0.454238 -0.0447384
+0.0291119 0.88928 0.456191 -0.0149341
+-0.0291121 0.88928 0.456191 0.0149342
+-0.0872115 0.885472 0.454238 0.0447385
+-0.144937 0.877872 0.450339 0.0743513
+-0.202043 0.866513 0.444512 0.103646
+-0.258283 0.851444 0.436781 0.132496
+-0.313417 0.832728 0.427181 0.160779
+-0.367209 0.810447 0.415751 0.188374
+-0.419428 0.784695 0.40254 0.215162
+-0.469852 0.755583 0.387606 0.241029
+-0.518263 0.723236 0.371012 0.265864
+-0.564456 0.687791 0.352829 0.28956
+-0.608231 0.649401 0.333136 0.312016
+-0.649401 0.608231 0.312016 0.333136
+-0.687791 0.564456 0.28956 0.352829
+-0.723236 0.518263 0.265864 0.371012
+-0.755583 0.469852 0.241029 0.387606
+-0.784695 0.419428 0.215162 0.40254
+-0.810447 0.367209 0.188374 0.415751
+-0.832728 0.313417 0.160779 0.427181
+-0.851444 0.258283 0.132496 0.436782
+-0.866513 0.202043 0.103646 0.444512
+-0.877872 0.144937 0.0743513 0.450339
+-0.885472 0.0872113 0.0447385 0.454238
+-0.88928 0.0291121 0.0149342 0.456191
+0.0510037 0.00166969 0.682702 0.728913
+0.0507853 0.00500192 0.633567 0.772003
+0.0503494 0.00831272 0.581719 0.811788
+0.049698 0.0115879 0.52738 0.848096
+0.0488337 0.0148135 0.470783 0.880772
+0.0477602 0.0179757 0.412169 0.909677
+0.0464823 0.0210609 0.351791 0.934687
+0.0450054 0.0240559 0.289906 0.955694
+0.0433357 0.0269479 0.22678 0.972608
+0.0414804 0.0297244 0.162683 0.985358
+0.0394475 0.0323737 0.0978894 0.993888
+0.0372457 0.0348844 0.0326764 0.998162
+0.0348844 0.0372457 -0.0326765 0.998162
+0.0323737 0.0394475 -0.0978894 0.993888
+0.0297244 0.0414804 -0.162683 0.985358
+0.0269478 0.0433357 -0.22678 0.972608
+0.0240559 0.0450054 -0.289907 0.955693
+0.0210609 0.0464823 -0.351791 0.934686
+0.0179757 0.0477603 -0.412169 0.909677
+0.0148135 0.0488337 -0.470783 0.880772
+0.0115879 0.049698 -0.52738 0.848096
+0.00831272 0.0503494 -0.581719 0.811788
+0.00500191 0.0507853 -0.633567 0.772003
+0.00166968 0.0510037 -0.682702 0.728913
+-0.00166969 0.0510037 -0.728913 0.682702
+-0.00500192 0.0507853 -0.772003 0.633567
+-0.00831273 0.0503494 -0.811788 0.581719
+-0.0115879 0.049698 -0.848096 0.52738
+-0.0148135 0.0488337 -0.880772 0.470782
+-0.0179757 0.0477602 -0.909677 0.412169
+-0.0210609 0.0464823 -0.934687 0.351791
+-0.0240559 0.0450054 -0.955693 0.289907
+-0.0269478 0.0433357 -0.972608 0.22678
+-0.0297244 0.0414804 -0.985358 0.162683
+-0.0323737 0.0394475 -0.993888 0.0978895
+-0.0348844 0.0372457 -0.998162 0.0326764
+-0.0372457 0.0348844 -0.998162 -0.0326764
+-0.0394475 0.0323737 -0.993888 -0.0978895
+-0.0414804 0.0297244 -0.985358 -0.162683
+-0.0433357 0.0269478 -0.972608 -0.22678
+-0.0450054 0.0240559 -0.955694 -0.289906
+-0.0464823 0.0210609 -0.934687 -0.351791
+-0.0477603 0.0179757 -0.909677 -0.412169
+-0.0488337 0.0148135 -0.880772 -0.470783
+-0.049698 0.0115879 -0.848096 -0.52738
+-0.0503494 0.00831273 -0.811788 -0.581719
+-0.0507853 0.00500191 -0.772003 -0.633567
+-0.0510037 0.00166969 -0.728913 -0.682702
+0.102007 0.00333938 0.350411 0.931019
+0.101571 0.0100038 0.288769 0.951943
+0.100699 0.0166254 0.22589 0.968791
+0.0993959 0.0231759 0.162045 0.981491
+0.0976673 0.0296271 0.0975053 0.989988
+0.0955205 0.0359514 0.0325482 0.994245
+0.0929646 0.0421217 -0.0325482 0.994245
+0.0900107 0.0481117 -0.0975053 0.989988
+0.0866713 0.0538957 -0.162045 0.981491
+0.0829608 0.0594489 -0.22589 0.968791
+0.0788951 0.0647475 -0.288769 0.951943
+0.0744914 0.0697688 -0.350411 0.931019
+0.0697688 0.0744914 -0.410552 0.906107
+0.0647475 0.078895 -0.468935 0.877316
+0.0594489 0.0829608 -0.52531 0.844768
+0.0538957 0.0866713 -0.579436 0.808602
+0.0481117 0.0900107 -0.631081 0.768974
+0.0421217 0.0929647 -0.680023 0.726053
+0.0359514 0.0955205 -0.726053 0.680023
+0.0296271 0.0976673 -0.768974 0.63108
+0.0231759 0.0993959 -0.808602 0.579436
+0.0166254 0.100699 -0.844768 0.52531
+0.0100038 0.101571 -0.877316 0.468935
+0.00333937 0.102007 -0.906107 0.410552
+-0.00333939 0.102007 -0.931019 0.350411
+-0.0100038 0.101571 -0.951943 0.288769
+-0.0166255 0.100699 -0.968791 0.22589
+-0.0231759 0.0993959 -0.981491 0.162045
+-0.0296271 0.0976673 -0.989988 0.0975051
+-0.0359514 0.0955205 -0.994245 0.0325481
+-0.0421217 0.0929646 -0.994245 -0.0325484
+-0.0481117 0.0900107 -0.989988 -0.0975052
+-0.0538957 0.0866713 -0.981491 -0.162045
+-0.0594489 0.0829608 -0.968791 -0.225891
+-0.0647475 0.0788951 -0.951943 -0.288769
+-0.0697689 0.0744914 -0.931019 -0.350411
+-0.0744914 0.0697689 -0.906107 -0.410552
+-0.0788951 0.0647475 -0.877316 -0.468935
+-0.0829608 0.0594489 -0.844768 -0.52531
+-0.0866713 0.0538957 -0.808602 -0.579436
+-0.0900107 0.0481117 -0.768974 -0.63108
+-0.0929646 0.0421217 -0.726053 -0.680023
+-0.0955205 0.0359514 -0.680023 -0.726053
+-0.0976673 0.0296271 -0.631081 -0.768974
+-0.0993959 0.0231759 -0.579436 -0.808602
+-0.100699 0.0166255 -0.52531 -0.844768
+-0.101571 0.0100038 -0.468935 -0.877316
+-0.102007 0.00333939 -0.410552 -0.906107
+0.102007 0.00333938 0.906107 0.410552
+0.101571 0.0100038 0.877316 0.468935
+0.100699 0.0166254 0.844768 0.52531
+0.0993959 0.0231759 0.808602 0.579436
+0.0976673 0.0296271 0.768974 0.631081
+0.0955205 0.0359514 0.726053 0.680023
+0.0929646 0.0421217 0.680023 0.726053
+0.0900107 0.0481117 0.63108 0.768974
+0.0866713 0.0538957 0.579436 0.808602
+0.0829608 0.0594489 0.52531 0.844768
+0.0788951 0.0647475 0.468935 0.877316
+0.0744914 0.0697688 0.410552 0.906107
+0.0697688 0.0744914 0.350411 0.931019
+0.0647475 0.078895 0.288769 0.951943
+0.0594489 0.0829608 0.22589 0.968791
+0.0538957 0.0866713 0.162045 0.981491
+0.0481117 0.0900107 0.0975052 0.989988
+0.0421217 0.0929647 0.0325482 0.994245
+0.0359514 0.0955205 -0.0325483 0.994245
+0.0296271 0.0976673 -0.0975053 0.989988
+0.0231759 0.0993959 -0.162045 0.981491
+0.0166254 0.100699 -0.225891 0.968791
+0.0100038 0.101571 -0.288769 0.951943
+0.00333937 0.102007 -0.350411 0.931019
+-0.00333939 0.102007 -0.410552 0.906107
+-0.0100038 0.101571 -0.468935 0.877316
+-0.0166255 0.100699 -0.52531 0.844768
+-0.0231759 0.0993959 -0.579436 0.808602
+-0.0296271 0.0976673 -0.631081 0.768974
+-0.0359514 0.0955205 -0.680023 0.726053
+-0.0421217 0.0929646 -0.726053 0.680023
+-0.0481117 0.0900107 -0.768974 0.631081
+-0.0538957 0.0866713 -0.808602 0.579436
+-0.0594489 0.0829608 -0.844768 0.52531
+-0.0647475 0.0788951 -0.877316 0.468935
+-0.0697689 0.0744914 -0.906107 0.410552
+-0.0744914 0.0697689 -0.931019 0.350411
+-0.0788951 0.0647475 -0.951943 0.288769
+-0.0829608 0.0594489 -0.968791 0.225891
+-0.0866713 0.0538957 -0.981491 0.162045
+-0.0900107 0.0481117 -0.989988 0.0975054
+-0.0929646 0.0421217 -0.994245 0.0325482
+-0.0955205 0.0359514 -0.994245 -0.0325483
+-0.0976673 0.0296271 -0.989988 -0.0975053
+-0.0993959 0.0231759 -0.981491 -0.162045
+-0.100699 0.0166255 -0.968791 -0.22589
+-0.101571 0.0100038 -0.951943 -0.288769
+-0.102007 0.00333939 -0.931019 -0.350411
+0.153011 0.00500907 0.675534 0.72126
+0.152356 0.0150057 0.626915 0.763898
+0.151048 0.0249382 0.575611 0.803265
+0.149094 0.0347638 0.521843 0.839192
+0.146501 0.0444406 0.46584 0.871525
+0.143281 0.0539271 0.407842 0.900126
+0.139447 0.0631826 0.348098 0.924873
+0.135016 0.0721676 0.286863 0.94566
+0.130007 0.0808436 0.224399 0.962397
+0.124441 0.0891733 0.160975 0.975013
+0.118343 0.0971212 0.0968617 0.983453
+0.111737 0.104653 0.0323334 0.987683
+0.104653 0.111737 -0.0323334 0.987683
+0.0971212 0.118343 -0.0968617 0.983453
+0.0891733 0.124441 -0.160975 0.975013
+0.0808435 0.130007 -0.2244 0.962397
+0.0721676 0.135016 -0.286863 0.94566
+0.0631826 0.139447 -0.348098 0.924873
+0.053927 0.143281 -0.407842 0.900126
+0.0444406 0.146501 -0.46584 0.871525
+0.0347638 0.149094 -0.521843 0.839192
+0.0249382 0.151048 -0.575611 0.803265
+0.0150057 0.152356 -0.626915 0.763898
+0.00500905 0.153011 -0.675534 0.72126
+-0.00500908 0.153011 -0.72126 0.675534
+-0.0150058 0.152356 -0.763898 0.626915
+-0.0249382 0.151048 -0.803265 0.575611
+-0.0347638 0.149094 -0.839192 0.521843
+-0.0444406 0.146501 -0.871525 0.46584
+-0.0539271 0.143281 -0.900126 0.407842
+-0.0631826 0.139447 -0.924873 0.348098
+-0.0721676 0.135016 -0.94566 0.286863
+-0.0808435 0.130007 -0.962397 0.224399
+-0.0891733 0.124441 -0.975013 0.160975
+-0.0971212 0.118343 -0.983453 0.0968617
+-0.104653 0.111737 -0.987683 0.0323333
+-0.111737 0.104653 -0.987683 -0.0323333
+-0.118343 0.0971212 -0.983453 -0.0968617
+-0.124441 0.0891733 -0.975013 -0.160975
+-0.130007 0.0808435 -0.962397 -0.224399
+-0.135016 0.0721676 -0.94566 -0.286863
+-0.139447 0.0631826 -0.924873 -0.348098
+-0.143281 0.053927 -0.900126 -0.407842
+-0.146501 0.0444406 -0.871525 -0.46584
+-0.149094 0.0347638 -0.839192 -0.521843
+-0.151048 0.0249382 -0.803265 -0.575611
+-0.152356 0.0150057 -0.763898 -0.626915
+-0.153011 0.00500908 -0.72126 -0.675534
+0.153011 0.00500907 0.224399 0.962397
+0.152356 0.0150057 0.160975 0.975013
+0.151048 0.0249382 0.0968617 0.983453
+0.149094 0.0347638 0.0323334 0.987683
+0.146501 0.0444406 -0.0323334 0.987683
+0.143281 0.0539271 -0.0968617 0.983453
+0.139447 0.0631826 -0.160975 0.975013
+0.135016 0.0721676 -0.224399 0.962397
+0.130007 0.0808436 -0.286863 0.94566
+0.124441 0.0891733 -0.348098 0.924873
+0.118343 0.0971212 -0.407842 0.900126
+0.111737 0.104653 -0.46584 0.871525
+0.104653 0.111737 -0.521843 0.839192
+0.0971212 0.118343 -0.575611 0.803265
+0.0891733 0.124441 -0.626915 0.763898
+0.0808435 0.130007 -0.675534 0.72126
+0.0721676 0.135016 -0.72126 0.675534
+0.0631826 0.139447 -0.763898 0.626915
+0.053927 0.143281 -0.803265 0.575611
+0.0444406 0.146501 -0.839192 0.521843
+0.0347638 0.149094 -0.871525 0.46584
+0.0249382 0.151048 -0.900126 0.407842
+0.0150057 0.152356 -0.924873 0.348098
+0.00500905 0.153011 -0.94566 0.286863
+-0.00500908 0.153011 -0.962397 0.224399
+-0.0150058 0.152356 -0.975013 0.160975
+-0.0249382 0.151048 -0.983453 0.0968616
+-0.0347638 0.149094 -0.987683 0.0323333
+-0.0444406 0.146501 -0.987683 -0.0323335
+-0.0539271 0.143281 -0.983453 -0.0968618
+-0.0631826 0.139447 -0.975013 -0.160975
+-0.0721676 0.135016 -0.962397 -0.224399
+-0.0808435 0.130007 -0.94566 -0.286863
+-0.0891733 0.124441 -0.924873 -0.348098
+-0.0971212 0.118343 -0.900126 -0.407842
+-0.104653 0.111737 -0.871525 -0.46584
+-0.111737 0.104653 -0.839192 -0.521843
+-0.118343 0.0971212 -0.803265 -0.575611
+-0.124441 0.0891733 -0.763898 -0.626915
+-0.130007 0.0808435 -0.72126 -0.675534
+-0.135016 0.0721676 -0.675534 -0.72126
+-0.139447 0.0631826 -0.626915 -0.763898
+-0.143281 0.053927 -0.575611 -0.803265
+-0.146501 0.0444406 -0.521843 -0.839192
+-0.149094 0.0347638 -0.46584 -0.871525
+-0.151048 0.0249382 -0.407842 -0.900126
+-0.152356 0.0150057 -0.348098 -0.924873
+-0.153011 0.00500908 -0.286863 -0.94566
+0.204015 0.00667875 0.159466 0.96587
+0.203141 0.0200077 0.0959534 0.974231
+0.201398 0.0332509 0.0320302 0.978421
+0.198792 0.0463518 -0.0320302 0.978421
+0.195335 0.0592541 -0.0959534 0.974231
+0.191041 0.0719027 -0.159466 0.96587
+0.185929 0.0842435 -0.222295 0.953372
+0.180021 0.0962235 -0.284173 0.936792
+0.173343 0.107791 -0.344833 0.9162
+0.165922 0.118898 -0.404017 0.891686
+0.15779 0.129495 -0.461471 0.863352
+0.148983 0.139538 -0.516949 0.831322
+0.139538 0.148983 -0.570214 0.795732
+0.129495 0.15779 -0.621036 0.756735
+0.118898 0.165922 -0.669199 0.714497
+0.107791 0.173343 -0.714497 0.669199
+0.0962234 0.180021 -0.756735 0.621036
+0.0842435 0.185929 -0.795732 0.570214
+0.0719027 0.191041 -0.831322 0.516949
+0.0592541 0.195335 -0.863352 0.461471
+0.0463517 0.198792 -0.891686 0.404017
+0.0332509 0.201398 -0.9162 0.344833
+0.0200076 0.203141 -0.936792 0.284173
+0.00667873 0.204015 -0.953372 0.222295
+-0.00667877 0.204015 -0.96587 0.159466
+-0.0200077 0.203141 -0.974231 0.0959533
+-0.0332509 0.201398 -0.978421 0.0320301
+-0.0463518 0.198792 -0.978421 -0.0320302
+-0.0592541 0.195335 -0.974231 -0.0959535
+-0.0719028 0.191041 -0.96587 -0.159466
+-0.0842435 0.185929 -0.953372 -0.222295
+-0.0962234 0.180021 -0.936792 -0.284173
+-0.107791 0.173343 -0.9162 -0.344833
+-0.118898 0.165922 -0.891686 -0.404018
+-0.129495 0.15779 -0.863352 -0.461471
+-0.139538 0.148983 -0.831322 -0.516949
+-0.148983 0.139538 -0.795732 -0.570214
+-0.15779 0.129495 -0.756735 -0.621036
+-0.165922 0.118898 -0.714497 -0.669199
+-0.173343 0.107791 -0.669199 -0.714497
+-0.180021 0.0962235 -0.621036 -0.756735
+-0.185929 0.0842435 -0.570214 -0.795732
+-0.191041 0.0719027 -0.516949 -0.831322
+-0.195335 0.0592541 -0.461472 -0.863352
+-0.198792 0.0463517 -0.404017 -0.891686
+-0.201398 0.0332509 -0.344833 -0.9162
+-0.203141 0.0200077 -0.284173 -0.936792
+-0.204015 0.00667877 -0.222295 -0.953372
+0.204015 0.00667875 0.516949 0.831322
+0.203141 0.0200077 0.461471 0.863352
+0.201398 0.0332509 0.404017 0.891686
+0.198792 0.0463518 0.344833 0.9162
+0.195335 0.0592541 0.284173 0.936792
+0.191041 0.0719027 0.222295 0.953372
+0.185929 0.0842435 0.159466 0.96587
+0.180021 0.0962235 0.0959534 0.974231
+0.173343 0.107791 0.0320302 0.978421
+0.165922 0.118898 -0.0320302 0.978421
+0.15779 0.129495 -0.0959534 0.974231
+0.148983 0.139538 -0.159466 0.96587
+0.139538 0.148983 -0.222295 0.953372
+0.129495 0.15779 -0.284173 0.936792
+0.118898 0.165922 -0.344833 0.9162
+0.107791 0.173343 -0.404018 0.891686
+0.0962234 0.180021 -0.461472 0.863352
+0.0842435 0.185929 -0.516949 0.831322
+0.0719027 0.191041 -0.570214 0.795732
+0.0592541 0.195335 -0.621036 0.756735
+0.0463517 0.198792 -0.669199 0.714497
+0.0332509 0.201398 -0.714497 0.669199
+0.0200076 0.203141 -0.756735 0.621036
+0.00667873 0.204015 -0.795732 0.570214
+-0.00667877 0.204015 -0.831322 0.516949
+-0.0200077 0.203141 -0.863352 0.461471
+-0.0332509 0.201398 -0.891686 0.404017
+-0.0463518 0.198792 -0.9162 0.344833
+-0.0592541 0.195335 -0.936792 0.284173
+-0.0719028 0.191041 -0.953372 0.222295
+-0.0842435 0.185929 -0.96587 0.159466
+-0.0962234 0.180021 -0.974231 0.0959535
+-0.107791 0.173343 -0.978421 0.0320302
+-0.118898 0.165922 -0.978421 -0.0320303
+-0.129495 0.15779 -0.974231 -0.0959534
+-0.139538 0.148983 -0.96587 -0.159466
+-0.148983 0.139538 -0.953372 -0.222295
+-0.15779 0.129495 -0.936792 -0.284173
+-0.165922 0.118898 -0.9162 -0.344833
+-0.173343 0.107791 -0.891686 -0.404018
+-0.180021 0.0962235 -0.863352 -0.461471
+-0.185929 0.0842435 -0.831322 -0.516949
+-0.191041 0.0719027 -0.795732 -0.570214
+-0.195335 0.0592541 -0.756735 -0.621036
+-0.198792 0.0463517 -0.714497 -0.669199
+-0.201398 0.0332509 -0.669199 -0.714497
+-0.203141 0.0200077 -0.621036 -0.756735
+-0.204015 0.00667877 -0.570214 -0.795732
+0.255019 0.00834844 0.41054 0.875416
+0.253927 0.0250096 0.352407 0.900392
+0.251747 0.0415636 0.292764 0.921513
+0.24849 0.0579397 0.231867 0.938687
+0.244168 0.0740676 0.169977 0.951842
+0.238801 0.0898784 0.10736 0.960921
+0.232412 0.105304 0.0442829 0.965886
+0.225027 0.120279 -0.0189838 0.966714
+0.216678 0.134739 -0.0821693 0.963402
+0.207402 0.148622 -0.145003 0.955965
+0.197238 0.161869 -0.207216 0.944435
+0.186229 0.174422 -0.268541 0.92886
+0.174422 0.186229 -0.328716 0.909308
+0.161869 0.197238 -0.387484 0.885862
+0.148622 0.207402 -0.444593 0.858623
+0.134739 0.216678 -0.499797 0.827707
+0.120279 0.225027 -0.552862 0.793246
+0.105304 0.232412 -0.603559 0.755389
+0.0898784 0.238801 -0.651671 0.714297
+0.0740676 0.244168 -0.696993 0.670146
+0.0579397 0.24849 -0.739331 0.623126
+0.0415636 0.251747 -0.778502 0.573437
+0.0250096 0.253927 -0.81434 0.521293
+0.00834842 0.255019 -0.846691 0.466916
+-0.00834847 0.255019 -0.875416 0.41054
+-0.0250096 0.253927 -0.900392 0.352406
+-0.0415636 0.251747 -0.921513 0.292764
+-0.0579397 0.24849 -0.938687 0.231867
+-0.0740677 0.244168 -0.951842 0.169977
+-0.0898785 0.238801 -0.960921 0.10736
+-0.105304 0.232412 -0.965886 0.0442828
+-0.120279 0.225027 -0.966714 -0.0189837
+-0.134739 0.216678 -0.963402 -0.0821693
+-0.148622 0.207402 -0.955965 -0.145003
+-0.161869 0.197238 -0.944435 -0.207216
+-0.174422 0.186229 -0.92886 -0.268541
+-0.186229 0.174422 -0.909308 -0.328716
+-0.197238 0.161869 -0.885862 -0.387484
+-0.207402 0.148622 -0.858623 -0.444593
+-0.216678 0.134739 -0.827707 -0.499797
+-0.225027 0.120279 -0.793246 -0.552862
+-0.232412 0.105304 -0.755389 -0.603559
+-0.238801 0.0898784 -0.714297 -0.651672
+-0.244168 0.0740676 -0.670146 -0.696993
+-0.24849 0.0579397 -0.623126 -0.739331
+-0.251747 0.0415636 -0.573437 -0.778502
+-0.253927 0.0250096 -0.521293 -0.81434
+-0.255019 0.00834847 -0.466916 -0.846691
+0.153011 0.00500907 0.94566 0.286863
+0.152356 0.0150057 0.924873 0.348098
+0.151048 0.0249382 0.900126 0.407842
+0.149094 0.0347638 0.871525 0.46584
+0.146501 0.0444406 0.839192 0.521843
+0.143281 0.0539271 0.803265 0.575611
+0.139447 0.0631826 0.763898 0.626915
+0.135016 0.0721676 0.72126 0.675534
+0.130007 0.0808436 0.675534 0.72126
+0.124441 0.0891733 0.626915 0.763898
+0.118343 0.0971212 0.575611 0.803265
+0.111737 0.104653 0.521843 0.839192
+0.104653 0.111737 0.46584 0.871525
+0.0971212 0.118343 0.407842 0.900126
+0.0891733 0.124441 0.348098 0.924873
+0.0808435 0.130007 0.286863 0.94566
+0.0721676 0.135016 0.224399 0.962397
+0.0631826 0.139447 0.160975 0.975013
+0.053927 0.143281 0.0968616 0.983453
+0.0444406 0.146501 0.0323333 0.987683
+0.0347638 0.149094 -0.0323334 0.987683
+0.0249382 0.151048 -0.0968618 0.983453
+0.0150057 0.152356 -0.160975 0.975013
+0.00500905 0.153011 -0.2244 0.962397
+-0.00500908 0.153011 -0.286863 0.94566
+-0.0150058 0.152356 -0.348098 0.924873
+-0.0249382 0.151048 -0.407842 0.900126
+-0.0347638 0.149094 -0.46584 0.871525
+-0.0444406 0.146501 -0.521843 0.839192
+-0.0539271 0.143281 -0.575611 0.803265
+-0.0631826 0.139447 -0.626915 0.763898
+-0.0721676 0.135016 -0.675534 0.72126
+-0.0808435 0.130007 -0.72126 0.675534
+-0.0891733 0.124441 -0.763898 0.626915
+-0.0971212 0.118343 -0.803265 0.575611
+-0.104653 0.111737 -0.839192 0.521843
+-0.111737 0.104653 -0.871525 0.46584
+-0.118343 0.0971212 -0.900126 0.407842
+-0.124441 0.0891733 -0.924873 0.348098
+-0.130007 0.0808435 -0.94566 0.286863
+-0.135016 0.0721676 -0.962397 0.2244
+-0.139447 0.0631826 -0.975013 0.160975
+-0.143281 0.053927 -0.983453 0.0968616
+-0.146501 0.0444406 -0.987683 0.0323334
+-0.149094 0.0347638 -0.987683 -0.0323335
+-0.151048 0.0249382 -0.983453 -0.0968616
+-0.152356 0.0150057 -0.975013 -0.160975
+-0.153011 0.00500908 -0.962397 -0.224399
+0.204015 0.00667875 0.795732 0.570214
+0.203141 0.0200077 0.756735 0.621036
+0.201398 0.0332509 0.714497 0.669199
+0.198792 0.0463518 0.669199 0.714497
+0.195335 0.0592541 0.621036 0.756735
+0.191041 0.0719027 0.570214 0.795732
+0.185929 0.0842435 0.516949 0.831322
+0.180021 0.0962235 0.461471 0.863352
+0.173343 0.107791 0.404017 0.891686
+0.165922 0.118898 0.344833 0.9162
+0.15779 0.129495 0.284173 0.936792
+0.148983 0.139538 0.222295 0.953372
+0.139538 0.148983 0.159466 0.96587
+0.129495 0.15779 0.0959534 0.974231
+0.118898 0.165922 0.0320301 0.978421
+0.107791 0.173343 -0.0320303 0.978421
+0.0962234 0.180021 -0.0959535 0.974231
+0.0842435 0.185929 -0.159466 0.96587
+0.0719027 0.191041 -0.222295 0.953372
+0.0592541 0.195335 -0.284173 0.936792
+0.0463517 0.198792 -0.344833 0.9162
+0.0332509 0.201398 -0.404018 0.891686
+0.0200076 0.203141 -0.461472 0.863352
+0.00667873 0.204015 -0.516949 0.831322
+-0.00667877 0.204015 -0.570214 0.795732
+-0.0200077 0.203141 -0.621036 0.756735
+-0.0332509 0.201398 -0.669199 0.714497
+-0.0463518 0.198792 -0.714497 0.669199
+-0.0592541 0.195335 -0.756735 0.621036
+-0.0719028 0.191041 -0.795732 0.570214
+-0.0842435 0.185929 -0.831322 0.516949
+-0.0962234 0.180021 -0.863352 0.461472
+-0.107791 0.173343 -0.891686 0.404017
+-0.118898 0.165922 -0.9162 0.344833
+-0.129495 0.15779 -0.936792 0.284173
+-0.139538 0.148983 -0.953372 0.222295
+-0.148983 0.139538 -0.96587 0.159466
+-0.15779 0.129495 -0.974231 0.0959533
+-0.165922 0.118898 -0.978421 0.0320303
+-0.173343 0.107791 -0.978421 -0.0320302
+-0.180021 0.0962235 -0.974231 -0.0959533
+-0.185929 0.0842435 -0.96587 -0.159466
+-0.191041 0.0719027 -0.953372 -0.222295
+-0.195335 0.0592541 -0.936792 -0.284173
+-0.198792 0.0463517 -0.9162 -0.344833
+-0.201398 0.0332509 -0.891686 -0.404017
+-0.203141 0.0200077 -0.863352 -0.461472
+-0.204015 0.00667877 -0.831322 -0.516949
+0.204015 0.00667875 0.953372 0.222295
+0.203141 0.0200077 0.936792 0.284173
+0.201398 0.0332509 0.9162 0.344833
+0.198792 0.0463518 0.891686 0.404017
+0.195335 0.0592541 0.863352 0.461471
+0.191041 0.0719027 0.831322 0.516949
+0.185929 0.0842435 0.795732 0.570214
+0.180021 0.0962235 0.756735 0.621036
+0.173343 0.107791 0.714497 0.669199
+0.165922 0.118898 0.669199 0.714497
+0.15779 0.129495 0.621036 0.756735
+0.148983 0.139538 0.570214 0.795732
+0.139538 0.148983 0.516949 0.831322
+0.129495 0.15779 0.461471 0.863352
+0.118898 0.165922 0.404017 0.891686
+0.107791 0.173343 0.344833 0.9162
+0.0962234 0.180021 0.284173 0.936792
+0.0842435 0.185929 0.222295 0.953372
+0.0719027 0.191041 0.159466 0.96587
+0.0592541 0.195335 0.0959533 0.974231
+0.0463517 0.198792 0.0320302 0.978421
+0.0332509 0.201398 -0.0320303 0.978421
+0.0200076 0.203141 -0.0959535 0.974231
+0.00667873 0.204015 -0.159466 0.96587
+-0.00667877 0.204015 -0.222295 0.953372
+-0.0200077 0.203141 -0.284173 0.936792
+-0.0332509 0.201398 -0.344833 0.9162
+-0.0463518 0.198792 -0.404018 0.891686
+-0.0592541 0.195335 -0.461472 0.863352
+-0.0719028 0.191041 -0.51695 0.831322
+-0.0842435 0.185929 -0.570214 0.795732
+-0.0962234 0.180021 -0.621036 0.756735
+-0.107791 0.173343 -0.669199 0.714497
+-0.118898 0.165922 -0.714497 0.669199
+-0.129495 0.15779 -0.756735 0.621036
+-0.139538 0.148983 -0.795732 0.570214
+-0.148983 0.139538 -0.831322 0.516949
+-0.15779 0.129495 -0.863352 0.461471
+-0.165922 0.118898 -0.891686 0.404018
+-0.173343 0.107791 -0.9162 0.344833
+-0.180021 0.0962235 -0.936792 0.284173
+-0.185929 0.0842435 -0.953372 0.222295
+-0.191041 0.0719027 -0.96587 0.159466
+-0.195335 0.0592541 -0.974231 0.0959534
+-0.198792 0.0463517 -0.978421 0.0320301
+-0.201398 0.0332509 -0.978421 -0.0320301
+-0.203141 0.0200077 -0.974231 -0.0959534
+-0.204015 0.00667877 -0.96587 -0.159466
+0.255019 0.00834844 0.846691 0.466916
+0.253927 0.0250096 0.81434 0.521293
+0.251747 0.0415636 0.778502 0.573437
+0.24849 0.0579397 0.739331 0.623126
+0.244168 0.0740676 0.696993 0.670146
+0.238801 0.0898784 0.651671 0.714297
+0.232412 0.105304 0.603559 0.755389
+0.225027 0.120279 0.552862 0.793246
+0.216678 0.134739 0.499797 0.827707
+0.207402 0.148622 0.444593 0.858623
+0.197238 0.161869 0.387484 0.885862
+0.186229 0.174422 0.328716 0.909308
+0.174422 0.186229 0.268541 0.92886
+0.161869 0.197238 0.207216 0.944435
+0.148622 0.207402 0.145003 0.955965
+0.134739 0.216678 0.0821692 0.963402
+0.120279 0.225027 0.0189837 0.966714
+0.105304 0.232412 -0.044283 0.965886
+0.0898784 0.238801 -0.10736 0.960921
+0.0740676 0.244168 -0.169977 0.951842
+0.0579397 0.24849 -0.231867 0.938687
+0.0415636 0.251747 -0.292764 0.921512
+0.0250096 0.253927 -0.352407 0.900392
+0.00834842 0.255019 -0.410541 0.875415
+-0.00834847 0.255019 -0.466916 0.846691
+-0.0250096 0.253927 -0.521293 0.81434
+-0.0415636 0.251747 -0.573437 0.778502
+-0.0579397 0.24849 -0.623126 0.739331
+-0.0740677 0.244168 -0.670146 0.696993
+-0.0898785 0.238801 -0.714297 0.651671
+-0.105304 0.232412 -0.755389 0.603559
+-0.120279 0.225027 -0.793246 0.552862
+-0.134739 0.216678 -0.827707 0.499797
+-0.148622 0.207402 -0.858623 0.444593
+-0.161869 0.197238 -0.885862 0.387484
+-0.174422 0.186229 -0.909308 0.328716
+-0.186229 0.174422 -0.92886 0.268541
+-0.197238 0.161869 -0.944435 0.207216
+-0.207402 0.148622 -0.955965 0.145003
+-0.216678 0.134739 -0.963402 0.0821693
+-0.225027 0.120279 -0.966714 0.0189839
+-0.232412 0.105304 -0.965886 -0.0442829
+-0.238801 0.0898784 -0.960921 -0.10736
+-0.244168 0.0740676 -0.951842 -0.169977
+-0.24849 0.0579397 -0.938687 -0.231867
+-0.251747 0.0415636 -0.921513 -0.292764
+-0.253927 0.0250096 -0.900392 -0.352407
+-0.255019 0.00834847 -0.875416 -0.41054
+0.255019 0.00834844 0.660965 0.705706
+0.253927 0.0250096 0.613395 0.747424
+0.251747 0.0415636 0.563198 0.785942
+0.24849 0.0579397 0.510589 0.821094
+0.244168 0.0740676 0.455794 0.85273
+0.238801 0.0898784 0.399046 0.880714
+0.232412 0.105304 0.340591 0.904928
+0.225027 0.120279 0.280676 0.925266
+0.216678 0.134739 0.21956 0.941642
+0.207402 0.148622 0.157504 0.953986
+0.197238 0.161869 0.0947728 0.962244
+0.186229 0.174422 0.0316361 0.966382
+0.174422 0.186229 -0.0316361 0.966382
+0.161869 0.197238 -0.0947728 0.962244
+0.148622 0.207402 -0.157504 0.953986
+0.134739 0.216678 -0.21956 0.941642
+0.120279 0.225027 -0.280676 0.925266
+0.105304 0.232412 -0.340591 0.904928
+0.0898784 0.238801 -0.399047 0.880714
+0.0740676 0.244168 -0.455794 0.85273
+0.0579397 0.24849 -0.510589 0.821094
+0.0415636 0.251747 -0.563198 0.785941
+0.0250096 0.253927 -0.613395 0.747424
+0.00834842 0.255019 -0.660966 0.705706
+-0.00834847 0.255019 -0.705706 0.660965
+-0.0250096 0.253927 -0.747424 0.613395
+-0.0415636 0.251747 -0.785942 0.563198
+-0.0579397 0.24849 -0.821094 0.510589
+-0.0740677 0.244168 -0.85273 0.455793
+-0.0898785 0.238801 -0.880714 0.399046
+-0.105304 0.232412 -0.904928 0.34059
+-0.120279 0.225027 -0.925266 0.280676
+-0.134739 0.216678 -0.941642 0.21956
+-0.148622 0.207402 -0.953986 0.157504
+-0.161869 0.197238 -0.962244 0.0947728
+-0.174422 0.186229 -0.966382 0.031636
+-0.186229 0.174422 -0.966382 -0.031636
+-0.197238 0.161869 -0.962244 -0.0947728
+-0.207402 0.148622 -0.953986 -0.157504
+-0.216678 0.134739 -0.941642 -0.21956
+-0.225027 0.120279 -0.925266 -0.280676
+-0.232412 0.105304 -0.904928 -0.340591
+-0.238801 0.0898784 -0.880714 -0.399047
+-0.244168 0.0740676 -0.85273 -0.455794
+-0.24849 0.0579397 -0.821094 -0.510589
+-0.251747 0.0415636 -0.785942 -0.563198
+-0.253927 0.0250096 -0.747424 -0.613395
+-0.255019 0.00834847 -0.705706 -0.660965
+0.306022 0.0100181 0.554502 0.773807
+0.304712 0.0300115 0.502706 0.808416
+0.302097 0.0498763 0.448756 0.839564
+0.298188 0.0695276 0.392885 0.867117
+0.293002 0.0888812 0.335332 0.890956
+0.286561 0.107854 0.276343 0.91098
+0.278894 0.126365 0.21617 0.927103
+0.270032 0.144335 0.155072 0.939256
+0.260014 0.161687 0.0933095 0.947388
+0.248882 0.178347 0.0311476 0.951462
+0.236685 0.194242 -0.0311476 0.951462
+0.223474 0.209307 -0.0933096 0.947388
+0.209307 0.223474 -0.155072 0.939256
+0.194242 0.236685 -0.21617 0.927103
+0.178347 0.248882 -0.276343 0.91098
+0.161687 0.260014 -0.335332 0.890956
+0.144335 0.270032 -0.392885 0.867116
+0.126365 0.278894 -0.448756 0.839564
+0.107854 0.286562 -0.502706 0.808416
+0.0888812 0.293002 -0.554502 0.773807
+0.0695276 0.298188 -0.603924 0.735884
+0.0498763 0.302097 -0.650761 0.69481
+0.0300115 0.304712 -0.69481 0.65076
+0.0100181 0.306022 -0.735884 0.603924
+-0.0100182 0.306022 -0.773807 0.554502
+-0.0300115 0.304712 -0.808416 0.502706
+-0.0498764 0.302097 -0.839564 0.448756
+-0.0695276 0.298188 -0.867117 0.392885
+-0.0888812 0.293002 -0.890956 0.335332
+-0.107854 0.286561 -0.91098 0.276343
+-0.126365 0.278894 -0.927103 0.21617
+-0.144335 0.270032 -0.939256 0.155072
+-0.161687 0.260014 -0.947388 0.0933095
+-0.178347 0.248882 -0.951462 0.0311475
+-0.194242 0.236685 -0.951462 -0.0311476
+-0.209307 0.223474 -0.947388 -0.0933096
+-0.223474 0.209307 -0.939256 -0.155072
+-0.236685 0.194242 -0.927103 -0.21617
+-0.248882 0.178347 -0.91098 -0.276343
+-0.260014 0.161687 -0.890956 -0.335332
+-0.270032 0.144335 -0.867117 -0.392885
+-0.278894 0.126365 -0.839564 -0.448756
+-0.286562 0.107854 -0.808416 -0.502706
+-0.293002 0.0888812 -0.773807 -0.554502
+-0.298188 0.0695276 -0.735884 -0.603924
+-0.302097 0.0498764 -0.69481 -0.65076
+-0.304712 0.0300115 -0.65076 -0.69481
+-0.306022 0.0100182 -0.603924 -0.735884
+0.306022 0.0100181 0.735884 0.603924
+0.304712 0.0300115 0.69481 0.65076
+0.302097 0.0498763 0.65076 0.69481
+0.298188 0.0695276 0.603924 0.735884
+0.293002 0.0888812 0.554502 0.773807
+0.286561 0.107854 0.502706 0.808416
+0.278894 0.126365 0.448756 0.839564
+0.270032 0.144335 0.392885 0.867117
+0.260014 0.161687 0.335332 0.890956
+0.248882 0.178347 0.276343 0.91098
+0.236685 0.194242 0.21617 0.927103
+0.223474 0.209307 0.155072 0.939256
+0.209307 0.223474 0.0933095 0.947388
+0.194242 0.236685 0.0311476 0.951462
+0.178347 0.248882 -0.0311477 0.951462
+0.161687 0.260014 -0.0933096 0.947388
+0.144335 0.270032 -0.155072 0.939256
+0.126365 0.278894 -0.21617 0.927103
+0.107854 0.286562 -0.276343 0.91098
+0.0888812 0.293002 -0.335332 0.890956
+0.0695276 0.298188 -0.392885 0.867117
+0.0498763 0.302097 -0.448756 0.839564
+0.0300115 0.304712 -0.502706 0.808416
+0.0100181 0.306022 -0.554502 0.773807
+-0.0100182 0.306022 -0.603924 0.735884
+-0.0300115 0.304712 -0.650761 0.69481
+-0.0498764 0.302097 -0.69481 0.65076
+-0.0695276 0.298188 -0.735884 0.603924
+-0.0888812 0.293002 -0.773807 0.554502
+-0.107854 0.286561 -0.808416 0.502705
+-0.126365 0.278894 -0.839564 0.448756
+-0.144335 0.270032 -0.867116 0.392885
+-0.161687 0.260014 -0.890956 0.335332
+-0.178347 0.248882 -0.91098 0.276343
+-0.194242 0.236685 -0.927103 0.21617
+-0.209307 0.223474 -0.939256 0.155072
+-0.223474 0.209307 -0.947388 0.0933096
+-0.236685 0.194242 -0.951462 0.0311476
+-0.248882 0.178347 -0.951462 -0.0311476
+-0.260014 0.161687 -0.947388 -0.0933096
+-0.270032 0.144335 -0.939256 -0.155072
+-0.278894 0.126365 -0.927103 -0.21617
+-0.286562 0.107854 -0.91098 -0.276343
+-0.293002 0.0888812 -0.890956 -0.335332
+-0.298188 0.0695276 -0.867116 -0.392885
+-0.302097 0.0498764 -0.839564 -0.448756
+-0.304712 0.0300115 -0.808416 -0.502706
+-0.306022 0.0100182 -0.773807 -0.554502
+0.357026 0.0116878 0.63849 0.681709
+0.355497 0.0350134 0.592537 0.722008
+0.352446 0.0581891 0.544047 0.759216
+0.347886 0.0811156 0.493227 0.793173
+0.341836 0.103695 0.440295 0.823733
+0.334322 0.12583 0.385477 0.850766
+0.325376 0.147426 0.329009 0.874156
+0.315037 0.168391 0.271132 0.893803
+0.30335 0.188635 0.212094 0.909622
+0.290363 0.208071 0.152148 0.921546
+0.276133 0.226616 0.0915501 0.929524
+0.26072 0.244191 0.0305603 0.933521
+0.244191 0.26072 -0.0305603 0.933521
+0.226616 0.276133 -0.0915501 0.929524
+0.208071 0.290363 -0.152148 0.921546
+0.188635 0.30335 -0.212094 0.909622
+0.168391 0.315038 -0.271132 0.893803
+0.147426 0.325376 -0.329009 0.874156
+0.12583 0.334322 -0.385477 0.850766
+0.103695 0.341836 -0.440295 0.823733
+0.0811155 0.347886 -0.493227 0.793173
+0.058189 0.352446 -0.544047 0.759216
+0.0350134 0.355497 -0.592537 0.722008
+0.0116878 0.357026 -0.63849 0.681709
+-0.0116879 0.357026 -0.681709 0.63849
+-0.0350134 0.355497 -0.722008 0.592537
+-0.0581891 0.352446 -0.759216 0.544047
+-0.0811156 0.347886 -0.793173 0.493227
+-0.103695 0.341836 -0.823733 0.440294
+-0.12583 0.334322 -0.850766 0.385477
+-0.147426 0.325376 -0.874156 0.329009
+-0.168391 0.315038 -0.893803 0.271132
+-0.188635 0.30335 -0.909622 0.212094
+-0.208071 0.290363 -0.921546 0.152148
+-0.226616 0.276133 -0.929524 0.0915501
+-0.244191 0.26072 -0.933521 0.0305603
+-0.26072 0.244191 -0.933521 -0.0305603
+-0.276133 0.226616 -0.929524 -0.0915501
+-0.290363 0.208071 -0.921546 -0.152148
+-0.30335 0.188635 -0.909622 -0.212094
+-0.315037 0.168391 -0.893803 -0.271132
+-0.325376 0.147426 -0.874156 -0.329009
+-0.334322 0.12583 -0.850766 -0.385477
+-0.341836 0.103695 -0.823733 -0.440295
+-0.347886 0.0811155 -0.793173 -0.493227
+-0.352446 0.0581891 -0.759216 -0.544047
+-0.355497 0.0350134 -0.722008 -0.592537
+-0.357026 0.0116879 -0.681709 -0.63849
+0.255019 0.00834844 0.119929 0.959434
+0.253927 0.0250096 0.0569222 0.965223
+0.251747 0.0415636 -0.0063283 0.966879
+0.24849 0.0579397 -0.0695517 0.964395
+0.244168 0.0740676 -0.132477 0.957782
+0.238801 0.0898784 -0.194836 0.947067
+0.232412 0.105304 -0.256359 0.932296
+0.225027 0.120279 -0.316786 0.913533
+0.216678 0.134739 -0.375855 0.890858
+0.207402 0.148622 -0.433316 0.864369
+0.197238 0.161869 -0.48892 0.834178
+0.186229 0.174422 -0.542431 0.800415
+0.174422 0.186229 -0.593619 0.763225
+0.161869 0.197238 -0.642266 0.722766
+0.148622 0.207402 -0.688162 0.679212
+0.134739 0.216678 -0.731111 0.63275
+0.120279 0.225027 -0.770929 0.583578
+0.105304 0.232412 -0.807447 0.531908
+0.0898784 0.238801 -0.840506 0.477959
+0.0740676 0.244168 -0.869967 0.421964
+0.0579397 0.24849 -0.895702 0.364162
+0.0415636 0.251747 -0.917601 0.304801
+0.0250096 0.253927 -0.935572 0.244134
+0.00834842 0.255019 -0.949536 0.182422
+-0.00834847 0.255019 -0.959434 0.119929
+-0.0250096 0.253927 -0.965223 0.0569221
+-0.0415636 0.251747 -0.966879 -0.00632837
+-0.0579397 0.24849 -0.964395 -0.0695517
+-0.0740677 0.244168 -0.957782 -0.132477
+-0.0898785 0.238801 -0.947066 -0.194836
+-0.105304 0.232412 -0.932296 -0.25636
+-0.120279 0.225027 -0.913533 -0.316786
+-0.134739 0.216678 -0.890858 -0.375855
+-0.148622 0.207402 -0.864369 -0.433316
+-0.161869 0.197238 -0.834178 -0.48892
+-0.174422 0.186229 -0.800415 -0.542431
+-0.186229 0.174422 -0.763225 -0.593619
+-0.197238 0.161869 -0.722766 -0.642266
+-0.207402 0.148622 -0.679212 -0.688162
+-0.216678 0.134739 -0.63275 -0.731111
+-0.225027 0.120279 -0.583578 -0.770929
+-0.232412 0.105304 -0.531908 -0.807447
+-0.238801 0.0898784 -0.477959 -0.840506
+-0.244168 0.0740676 -0.421964 -0.869967
+-0.24849 0.0579397 -0.364162 -0.895702
+-0.251747 0.0415636 -0.304801 -0.917601
+-0.253927 0.0250096 -0.244134 -0.935572
+-0.255019 0.00834847 -0.182422 -0.949536
+0.306022 0.0100181 0.0933095 0.947388
+0.304712 0.0300115 0.0311476 0.951462
+0.302097 0.0498763 -0.0311476 0.951462
+0.298188 0.0695276 -0.0933096 0.947388
+0.293002 0.0888812 -0.155072 0.939256
+0.286561 0.107854 -0.21617 0.927103
+0.278894 0.126365 -0.276343 0.91098
+0.270032 0.144335 -0.335332 0.890956
+0.260014 0.161687 -0.392885 0.867117
+0.248882 0.178347 -0.448756 0.839564
+0.236685 0.194242 -0.502706 0.808416
+0.223474 0.209307 -0.554502 0.773807
+0.209307 0.223474 -0.603924 0.735884
+0.194242 0.236685 -0.65076 0.69481
+0.178347 0.248882 -0.69481 0.65076
+0.161687 0.260014 -0.735884 0.603924
+0.144335 0.270032 -0.773807 0.554502
+0.126365 0.278894 -0.808416 0.502706
+0.107854 0.286562 -0.839564 0.448756
+0.0888812 0.293002 -0.867117 0.392885
+0.0695276 0.298188 -0.890956 0.335332
+0.0498763 0.302097 -0.91098 0.276343
+0.0300115 0.304712 -0.927103 0.21617
+0.0100181 0.306022 -0.939256 0.155072
+-0.0100182 0.306022 -0.947388 0.0933094
+-0.0300115 0.304712 -0.951462 0.0311476
+-0.0498764 0.302097 -0.951462 -0.0311477
+-0.0695276 0.298188 -0.947388 -0.0933096
+-0.0888812 0.293002 -0.939256 -0.155072
+-0.107854 0.286561 -0.927103 -0.21617
+-0.126365 0.278894 -0.91098 -0.276343
+-0.144335 0.270032 -0.890956 -0.335332
+-0.161687 0.260014 -0.867117 -0.392885
+-0.178347 0.248882 -0.839564 -0.448756
+-0.194242 0.236685 -0.808416 -0.502706
+-0.209307 0.223474 -0.773807 -0.554502
+-0.223474 0.209307 -0.735884 -0.603924
+-0.236685 0.194242 -0.69481 -0.650761
+-0.248882 0.178347 -0.650761 -0.69481
+-0.260014 0.161687 -0.603924 -0.735884
+-0.270032 0.144335 -0.554502 -0.773807
+-0.278894 0.126365 -0.502706 -0.808416
+-0.286562 0.107854 -0.448756 -0.839564
+-0.293002 0.0888812 -0.392885 -0.867117
+-0.298188 0.0695276 -0.335332 -0.890956
+-0.302097 0.0498764 -0.276343 -0.91098
+-0.304712 0.0300115 -0.21617 -0.927103
+-0.306022 0.0100182 -0.155072 -0.939256
+0.306022 0.0100181 0.335332 0.890956
+0.304712 0.0300115 0.276343 0.91098
+0.302097 0.0498763 0.21617 0.927103
+0.298188 0.0695276 0.155072 0.939256
+0.293002 0.0888812 0.0933095 0.947388
+0.286561 0.107854 0.0311477 0.951462
+0.278894 0.126365 -0.0311476 0.951462
+0.270032 0.144335 -0.0933096 0.947388
+0.260014 0.161687 -0.155072 0.939256
+0.248882 0.178347 -0.21617 0.927103
+0.236685 0.194242 -0.276343 0.91098
+0.223474 0.209307 -0.335332 0.890956
+0.209307 0.223474 -0.392885 0.867117
+0.194242 0.236685 -0.448756 0.839564
+0.178347 0.248882 -0.502706 0.808416
+0.161687 0.260014 -0.554502 0.773807
+0.144335 0.270032 -0.603924 0.735884
+0.126365 0.278894 -0.650761 0.69481
+0.107854 0.286562 -0.69481 0.65076
+0.0888812 0.293002 -0.735884 0.603924
+0.0695276 0.298188 -0.773807 0.554502
+0.0498763 0.302097 -0.808416 0.502706
+0.0300115 0.304712 -0.839564 0.448756
+0.0100181 0.306022 -0.867117 0.392885
+-0.0100182 0.306022 -0.890956 0.335332
+-0.0300115 0.304712 -0.91098 0.276343
+-0.0498764 0.302097 -0.927103 0.21617
+-0.0695276 0.298188 -0.939256 0.155072
+-0.0888812 0.293002 -0.947388 0.0933094
+-0.107854 0.286561 -0.951462 0.0311475
+-0.126365 0.278894 -0.951462 -0.0311478
+-0.144335 0.270032 -0.947388 -0.0933094
+-0.161687 0.260014 -0.939256 -0.155072
+-0.178347 0.248882 -0.927103 -0.21617
+-0.194242 0.236685 -0.91098 -0.276343
+-0.209307 0.223474 -0.890956 -0.335332
+-0.223474 0.209307 -0.867117 -0.392885
+-0.236685 0.194242 -0.839564 -0.448756
+-0.248882 0.178347 -0.808416 -0.502706
+-0.260014 0.161687 -0.773807 -0.554502
+-0.270032 0.144335 -0.735884 -0.603924
+-0.278894 0.126365 -0.69481 -0.65076
+-0.286562 0.107854 -0.65076 -0.69481
+-0.293002 0.0888812 -0.603924 -0.735884
+-0.298188 0.0695276 -0.554502 -0.773807
+-0.302097 0.0498764 -0.502706 -0.808416
+-0.304712 0.0300115 -0.448756 -0.839564
+-0.306022 0.0100182 -0.392885 -0.867116
+0.357026 0.0116878 0.279477 0.891229
+0.355497 0.0350134 0.22059 0.907599
+0.352446 0.0581891 0.160758 0.920083
+0.347886 0.0811156 0.100237 0.928627
+0.341836 0.103695 0.0392873 0.933195
+0.334322 0.12583 -0.0218307 0.933766
+0.325376 0.147426 -0.0828552 0.930339
+0.315037 0.168391 -0.143525 0.922928
+0.30335 0.188635 -0.20358 0.911565
+0.290363 0.208071 -0.262763 0.896299
+0.276133 0.226616 -0.320821 0.877194
+0.26072 0.244191 -0.377506 0.854333
+0.244191 0.26072 -0.432574 0.827814
+0.226616 0.276133 -0.485789 0.79775
+0.208071 0.290363 -0.536924 0.76427
+0.188635 0.30335 -0.58576 0.727517
+0.168391 0.315038 -0.632088 0.687649
+0.147426 0.325376 -0.675709 0.644836
+0.12583 0.334322 -0.716437 0.599262
+0.103695 0.341836 -0.754096 0.551121
+0.0811155 0.347886 -0.788527 0.500621
+0.058189 0.352446 -0.819581 0.447977
+0.0350134 0.355497 -0.847125 0.393415
+0.0116878 0.357026 -0.871042 0.337168
+-0.0116879 0.357026 -0.891229 0.279477
+-0.0350134 0.355497 -0.907599 0.22059
+-0.0581891 0.352446 -0.920083 0.160757
+-0.0811156 0.347886 -0.928627 0.100237
+-0.103695 0.341836 -0.933195 0.0392871
+-0.12583 0.334322 -0.933766 -0.0218308
+-0.147426 0.325376 -0.930339 -0.0828553
+-0.168391 0.315038 -0.922928 -0.143525
+-0.188635 0.30335 -0.911565 -0.20358
+-0.208071 0.290363 -0.896299 -0.262763
+-0.226616 0.276133 -0.877194 -0.320821
+-0.244191 0.26072 -0.854333 -0.377506
+-0.26072 0.244191 -0.827814 -0.432574
+-0.276133 0.226616 -0.79775 -0.485789
+-0.290363 0.208071 -0.76427 -0.536924
+-0.30335 0.188635 -0.727517 -0.58576
+-0.315037 0.168391 -0.687649 -0.632088
+-0.325376 0.147426 -0.644836 -0.675709
+-0.334322 0.12583 -0.599262 -0.716437
+-0.341836 0.103695 -0.551121 -0.754096
+-0.347886 0.0811155 -0.500621 -0.788527
+-0.352446 0.0581891 -0.447977 -0.819581
+-0.355497 0.0350134 -0.393415 -0.847125
+-0.357026 0.0116879 -0.337168 -0.871042
+0.357026 0.0116878 0.0741531 0.931073
+0.355497 0.0350134 0.0130992 0.933929
+0.352446 0.0581891 -0.0480108 0.932787
+0.347886 0.0811156 -0.108915 0.927649
+0.341836 0.103695 -0.169353 0.91854
+0.334322 0.12583 -0.229066 0.905497
+0.325376 0.147426 -0.287798 0.888577
+0.315037 0.168391 -0.345297 0.867851
+0.30335 0.188635 -0.401318 0.843409
+0.290363 0.208071 -0.45562 0.815356
+0.276133 0.226616 -0.507972 0.783811
+0.26072 0.244191 -0.558148 0.74891
+0.244191 0.26072 -0.605934 0.710802
+0.226616 0.276133 -0.651125 0.66965
+0.208071 0.290363 -0.693528 0.625631
+0.188635 0.30335 -0.732962 0.578932
+0.168391 0.315038 -0.769256 0.529755
+0.147426 0.325376 -0.802257 0.478309
+0.12583 0.334322 -0.831822 0.424815
+0.103695 0.341836 -0.857825 0.369501
+0.0811155 0.347886 -0.880155 0.312606
+0.058189 0.352446 -0.898716 0.254371
+0.0350134 0.355497 -0.913429 0.195048
+0.0116878 0.357026 -0.92423 0.134889
+-0.0116879 0.357026 -0.931073 0.074153
+-0.0350134 0.355497 -0.933929 0.0130991
+-0.0581891 0.352446 -0.932787 -0.0480108
+-0.0811156 0.347886 -0.927649 -0.108915
+-0.103695 0.341836 -0.91854 -0.169353
+-0.12583 0.334322 -0.905497 -0.229066
+-0.147426 0.325376 -0.888577 -0.287798
+-0.168391 0.315038 -0.867851 -0.345297
+-0.188635 0.30335 -0.84341 -0.401318
+-0.208071 0.290363 -0.815356 -0.455621
+-0.226616 0.276133 -0.783812 -0.507972
+-0.244191 0.26072 -0.74891 -0.558148
+-0.26072 0.244191 -0.710802 -0.605934
+-0.276133 0.226616 -0.66965 -0.651125
+-0.290363 0.208071 -0.625631 -0.693528
+-0.30335 0.188635 -0.578933 -0.732962
+-0.315037 0.168391 -0.529755 -0.769256
+-0.325376 0.147426 -0.478309 -0.802257
+-0.334322 0.12583 -0.424815 -0.831822
+-0.341836 0.103695 -0.369501 -0.857825
+-0.347886 0.0811155 -0.312606 -0.880155
+-0.352446 0.0581891 -0.254372 -0.898716
+-0.355497 0.0350134 -0.195048 -0.913429
+-0.357026 0.0116879 -0.134889 -0.92423
+0.40803 0.0133575 0.0597046 0.910916
+0.406282 0.0400153 -2.49393e-09 0.912871
+0.402795 0.0665018 -0.0597046 0.910916
+0.397584 0.0927035 -0.119154 0.905061
+0.390669 0.118508 -0.178092 0.89533
+0.382082 0.143805 -0.236268 0.881766
+0.371859 0.168487 -0.293433 0.864425
+0.360043 0.192447 -0.349341 0.843383
+0.346685 0.215583 -0.403752 0.818729
+0.331843 0.237796 -0.456435 0.790569
+0.31558 0.25899 -0.507164 0.759024
+0.297966 0.279075 -0.555721 0.724229
+0.279075 0.297966 -0.601898 0.686333
+0.25899 0.31558 -0.645497 0.645497
+0.237796 0.331843 -0.686333 0.601898
+0.215583 0.346685 -0.724229 0.555721
+0.192447 0.360043 -0.759024 0.507164
+0.168487 0.371859 -0.790569 0.456435
+0.143805 0.382082 -0.818729 0.403752
+0.118508 0.390669 -0.843383 0.349341
+0.0927035 0.397584 -0.864425 0.293433
+0.0665017 0.402795 -0.881766 0.236268
+0.0400153 0.406282 -0.89533 0.178092
+0.0133575 0.40803 -0.905061 0.119153
+-0.0133575 0.40803 -0.910916 0.0597045
+-0.0400154 0.406282 -0.912871 -7.64039e-08
+-0.0665018 0.402795 -0.910916 -0.0597047
+-0.0927035 0.397584 -0.905061 -0.119154
+-0.118508 0.390669 -0.89533 -0.178092
+-0.143806 0.382082 -0.881766 -0.236269
+-0.168487 0.371859 -0.864425 -0.293433
+-0.192447 0.360043 -0.843383 -0.34934
+-0.215583 0.346685 -0.818729 -0.403752
+-0.237796 0.331843 -0.790569 -0.456436
+-0.25899 0.31558 -0.759024 -0.507164
+-0.279075 0.297966 -0.724229 -0.555721
+-0.297966 0.279075 -0.686333 -0.601898
+-0.31558 0.25899 -0.645497 -0.645497
+-0.331843 0.237796 -0.601898 -0.686333
+-0.346685 0.215583 -0.555721 -0.724229
+-0.360043 0.192447 -0.507164 -0.759024
+-0.371859 0.168487 -0.456435 -0.790569
+-0.382082 0.143805 -0.403752 -0.818729
+-0.390669 0.118508 -0.349341 -0.843383
+-0.397584 0.0927035 -0.293433 -0.864425
+-0.402795 0.0665018 -0.236268 -0.881766
+-0.406282 0.0400153 -0.178092 -0.89533
+-0.40803 0.0133575 -0.119154 -0.905061
+0.40803 0.0133575 0.236268 0.881766
+0.406282 0.0400153 0.178092 0.89533
+0.402795 0.0665018 0.119154 0.905061
+0.397584 0.0927035 0.0597046 0.910916
+0.390669 0.118508 -6.80367e-10 0.912871
+0.382082 0.143805 -0.0597046 0.910916
+0.371859 0.168487 -0.119154 0.905061
+0.360043 0.192447 -0.178092 0.89533
+0.346685 0.215583 -0.236268 0.881766
+0.331843 0.237796 -0.293433 0.864425
+0.31558 0.25899 -0.349341 0.843383
+0.297966 0.279075 -0.403753 0.818729
+0.279075 0.297966 -0.456435 0.790569
+0.25899 0.31558 -0.507164 0.759024
+0.237796 0.331843 -0.555721 0.724229
+0.215583 0.346685 -0.601898 0.686333
+0.192447 0.360043 -0.645497 0.645497
+0.168487 0.371859 -0.686333 0.601898
+0.143805 0.382082 -0.724229 0.555721
+0.118508 0.390669 -0.759024 0.507164
+0.0927035 0.397584 -0.790569 0.456435
+0.0665017 0.402795 -0.818729 0.403752
+0.0400153 0.406282 -0.843383 0.34934
+0.0133575 0.40803 -0.864425 0.293433
+-0.0133575 0.40803 -0.881766 0.236268
+-0.0400154 0.406282 -0.89533 0.178092
+-0.0665018 0.402795 -0.905061 0.119154
+-0.0927035 0.397584 -0.910916 0.0597046
+-0.118508 0.390669 -0.912871 -1.49406e-07
+-0.143806 0.382082 -0.910916 -0.0597048
+-0.168487 0.371859 -0.905061 -0.119154
+-0.192447 0.360043 -0.89533 -0.178092
+-0.215583 0.346685 -0.881766 -0.236268
+-0.237796 0.331843 -0.864425 -0.293433
+-0.25899 0.31558 -0.843383 -0.349341
+-0.279075 0.297966 -0.818729 -0.403753
+-0.297966 0.279075 -0.790569 -0.456435
+-0.31558 0.25899 -0.759024 -0.507164
+-0.331843 0.237796 -0.724229 -0.555721
+-0.346685 0.215583 -0.686333 -0.601898
+-0.360043 0.192447 -0.645497 -0.645497
+-0.371859 0.168487 -0.601898 -0.686333
+-0.382082 0.143805 -0.555721 -0.724229
+-0.390669 0.118508 -0.507164 -0.759024
+-0.397584 0.0927035 -0.456435 -0.790569
+-0.402795 0.0665018 -0.403753 -0.818729
+-0.406282 0.0400153 -0.349341 -0.843383
+-0.40803 0.0133575 -0.293433 -0.864425
+0.456191 0.0149342 0.144937 0.877872
+0.454238 0.0447385 0.0872114 0.885472
+0.450339 0.0743513 0.029112 0.88928
+0.444512 0.103646 -0.029112 0.88928
+0.436782 0.132496 -0.0872114 0.885472
+0.427181 0.160779 -0.144937 0.877872
+0.415751 0.188374 -0.202043 0.866513
+0.40254 0.215162 -0.258283 0.851444
+0.387606 0.241029 -0.313417 0.832728
+0.371012 0.265863 -0.367209 0.810447
+0.352829 0.28956 -0.419428 0.784695
+0.333136 0.312016 -0.469852 0.755583
+0.312016 0.333136 -0.518263 0.723236
+0.28956 0.352829 -0.564456 0.687791
+0.265863 0.371012 -0.608231 0.649401
+0.241029 0.387606 -0.649401 0.608231
+0.215162 0.40254 -0.687791 0.564456
+0.188374 0.415751 -0.723236 0.518263
+0.160779 0.427181 -0.755583 0.469852
+0.132496 0.436782 -0.784695 0.419428
+0.103646 0.444512 -0.810447 0.367209
+0.0743512 0.450339 -0.832728 0.313417
+0.0447384 0.454238 -0.851444 0.258283
+0.0149341 0.456191 -0.866513 0.202042
+-0.0149342 0.456191 -0.877872 0.144937
+-0.0447385 0.454238 -0.885472 0.0872113
+-0.0743513 0.450339 -0.88928 0.029112
+-0.103646 0.444512 -0.88928 -0.0291121
+-0.132496 0.436781 -0.885472 -0.0872115
+-0.160779 0.427181 -0.877872 -0.144937
+-0.188374 0.415751 -0.866513 -0.202043
+-0.215162 0.40254 -0.851444 -0.258283
+-0.241029 0.387606 -0.832728 -0.313417
+-0.265864 0.371012 -0.810447 -0.367209
+-0.28956 0.352829 -0.784695 -0.419428
+-0.312016 0.333136 -0.755583 -0.469852
+-0.333136 0.312016 -0.723236 -0.518263
+-0.352829 0.28956 -0.687791 -0.564456
+-0.371012 0.265864 -0.649401 -0.608231
+-0.387606 0.241029 -0.608231 -0.649401
+-0.40254 0.215162 -0.564456 -0.687791
+-0.415751 0.188374 -0.518263 -0.723236
+-0.427181 0.160779 -0.469852 -0.755583
+-0.436782 0.132496 -0.419428 -0.784695
+-0.444512 0.103646 -0.367209 -0.810447
+-0.450339 0.0743513 -0.313417 -0.832728
+-0.454238 0.0447385 -0.258283 -0.851444
+-0.456191 0.0149342 -0.202043 -0.866513
+0.357026 0.0116878 0.470787 0.806694
+0.355497 0.0350134 0.417019 0.835758
+0.352446 0.0581891 0.361465 0.861243
+0.347886 0.0811156 0.304363 0.88304
+0.341836 0.103695 0.245958 0.901055
+0.334322 0.12583 0.186499 0.915212
+0.325376 0.147426 0.126242 0.925451
+0.315037 0.168391 0.0654444 0.931726
+0.30335 0.188635 0.00436652 0.934011
+0.290363 0.208071 -0.0567301 0.932297
+0.276133 0.226616 -0.117584 0.92659
+0.26072 0.244191 -0.177934 0.916916
+0.244191 0.26072 -0.237522 0.903316
+0.226616 0.276133 -0.296093 0.885847
+0.208071 0.290363 -0.353396 0.864585
+0.188635 0.30335 -0.409186 0.83962
+0.168391 0.315038 -0.463224 0.811061
+0.147426 0.325376 -0.515278 0.779028
+0.12583 0.334322 -0.565126 0.743659
+0.103695 0.341836 -0.612553 0.705106
+0.0811155 0.347886 -0.657358 0.663533
+0.058189 0.352446 -0.699348 0.619119
+0.0350134 0.355497 -0.738343 0.572054
+0.0116878 0.357026 -0.774176 0.522539
+-0.0116879 0.357026 -0.806694 0.470787
+-0.0350134 0.355497 -0.835758 0.417019
+-0.0581891 0.352446 -0.861243 0.361465
+-0.0811156 0.347886 -0.88304 0.304363
+-0.103695 0.341836 -0.901055 0.245957
+-0.12583 0.334322 -0.915213 0.186499
+-0.147426 0.325376 -0.925451 0.126242
+-0.168391 0.315038 -0.931726 0.0654445
+-0.188635 0.30335 -0.934011 0.00436653
+-0.208071 0.290363 -0.932297 -0.0567302
+-0.226616 0.276133 -0.92659 -0.117584
+-0.244191 0.26072 -0.916916 -0.177934
+-0.26072 0.244191 -0.903316 -0.237522
+-0.276133 0.226616 -0.885847 -0.296093
+-0.290363 0.208071 -0.864585 -0.353396
+-0.30335 0.188635 -0.83962 -0.409186
+-0.315037 0.168391 -0.811061 -0.463224
+-0.325376 0.147426 -0.779028 -0.515278
+-0.334322 0.12583 -0.743659 -0.565126
+-0.341836 0.103695 -0.705106 -0.612553
+-0.347886 0.0811155 -0.663533 -0.657358
+-0.352446 0.0581891 -0.619119 -0.699348
+-0.355497 0.0350134 -0.572054 -0.738343
+-0.357026 0.0116879 -0.522539 -0.774176
+0.40803 0.0133575 0.403752 0.818729
+0.406282 0.0400153 0.349341 0.843383
+0.402795 0.0665018 0.293433 0.864425
+0.397584 0.0927035 0.236268 0.881766
+0.390669 0.118508 0.178092 0.89533
+0.382082 0.143805 0.119154 0.905061
+0.371859 0.168487 0.0597046 0.910916
+0.360043 0.192447 -1.92711e-08 0.912871
+0.346685 0.215583 -0.0597046 0.910916
+0.331843 0.237796 -0.119154 0.905061
+0.31558 0.25899 -0.178092 0.89533
+0.297966 0.279075 -0.236268 0.881766
+0.279075 0.297966 -0.293433 0.864425
+0.25899 0.31558 -0.349341 0.843383
+0.237796 0.331843 -0.403753 0.818729
+0.215583 0.346685 -0.456436 0.790569
+0.192447 0.360043 -0.507164 0.759024
+0.168487 0.371859 -0.555721 0.724229
+0.143805 0.382082 -0.601898 0.686333
+0.118508 0.390669 -0.645497 0.645497
+0.0927035 0.397584 -0.686333 0.601898
+0.0665017 0.402795 -0.724229 0.555721
+0.0400153 0.406282 -0.759024 0.507164
+0.0133575 0.40803 -0.790569 0.456435
+-0.0133575 0.40803 -0.818729 0.403752
+-0.0400154 0.406282 -0.843383 0.349341
+-0.0665018 0.402795 -0.864425 0.293433
+-0.0927035 0.397584 -0.881766 0.236268
+-0.118508 0.390669 -0.89533 0.178092
+-0.143806 0.382082 -0.905061 0.119153
+-0.168487 0.371859 -0.910916 0.0597045
+-0.192447 0.360043 -0.912871 1.0406e-07
+-0.215583 0.346685 -0.910916 -0.0597046
+-0.237796 0.331843 -0.905061 -0.119154
+-0.25899 0.31558 -0.89533 -0.178092
+-0.279075 0.297966 -0.881766 -0.236268
+-0.297966 0.279075 -0.864425 -0.293433
+-0.31558 0.25899 -0.843383 -0.349341
+-0.331843 0.237796 -0.818729 -0.403752
+-0.346685 0.215583 -0.790569 -0.456435
+-0.360043 0.192447 -0.759024 -0.507164
+-0.371859 0.168487 -0.724229 -0.555721
+-0.382082 0.143805 -0.686333 -0.601898
+-0.390669 0.118508 -0.645497 -0.645497
+-0.397584 0.0927035 -0.601898 -0.686333
+-0.402795 0.0665018 -0.555721 -0.724229
+-0.406282 0.0400153 -0.507164 -0.759024
+-0.40803 0.0133575 -0.456436 -0.790569
+0.40803 0.0133575 0.555721 0.724229
+0.406282 0.0400153 0.507164 0.759024
+0.402795 0.0665018 0.456435 0.790569
+0.397584 0.0927035 0.403752 0.818729
+0.390669 0.118508 0.349341 0.843383
+0.382082 0.143805 0.293433 0.864425
+0.371859 0.168487 0.236268 0.881766
+0.360043 0.192447 0.178092 0.89533
+0.346685 0.215583 0.119154 0.905061
+0.331843 0.237796 0.0597046 0.910916
+0.31558 0.25899 1.65496e-08 0.912871
+0.297966 0.279075 -0.0597046 0.910916
+0.279075 0.297966 -0.119154 0.905061
+0.25899 0.31558 -0.178092 0.89533
+0.237796 0.331843 -0.236268 0.881766
+0.215583 0.346685 -0.293433 0.864425
+0.192447 0.360043 -0.349341 0.843383
+0.168487 0.371859 -0.403753 0.818729
+0.143805 0.382082 -0.456436 0.790569
+0.118508 0.390669 -0.507164 0.759024
+0.0927035 0.397584 -0.555721 0.724229
+0.0665017 0.402795 -0.601898 0.686333
+0.0400153 0.406282 -0.645497 0.645497
+0.0133575 0.40803 -0.686333 0.601898
+-0.0133575 0.40803 -0.724229 0.555721
+-0.0400154 0.406282 -0.759024 0.507164
+-0.0665018 0.402795 -0.790569 0.456435
+-0.0927035 0.397584 -0.818729 0.403752
+-0.118508 0.390669 -0.843383 0.34934
+-0.143806 0.382082 -0.864425 0.293433
+-0.168487 0.371859 -0.881766 0.236268
+-0.192447 0.360043 -0.89533 0.178092
+-0.215583 0.346685 -0.905061 0.119154
+-0.237796 0.331843 -0.910916 0.0597045
+-0.25899 0.31558 -0.912871 3.10581e-08
+-0.279075 0.297966 -0.910916 -0.0597047
+-0.297966 0.279075 -0.905061 -0.119154
+-0.31558 0.25899 -0.89533 -0.178092
+-0.331843 0.237796 -0.881766 -0.236268
+-0.346685 0.215583 -0.864425 -0.293433
+-0.360043 0.192447 -0.843383 -0.34934
+-0.371859 0.168487 -0.818729 -0.403752
+-0.382082 0.143805 -0.790569 -0.456436
+-0.390669 0.118508 -0.759024 -0.507164
+-0.397584 0.0927035 -0.724229 -0.555721
+-0.402795 0.0665018 -0.686333 -0.601898
+-0.406282 0.0400153 -0.645497 -0.645497
+-0.40803 0.0133575 -0.601898 -0.686333
+0.456191 0.0149342 0.469852 0.755583
+0.454238 0.0447385 0.419428 0.784695
+0.450339 0.0743513 0.367209 0.810447
+0.444512 0.103646 0.313417 0.832728
+0.436782 0.132496 0.258283 0.851444
+0.427181 0.160779 0.202043 0.866513
+0.415751 0.188374 0.144937 0.877872
+0.40254 0.215162 0.0872114 0.885472
+0.387606 0.241029 0.029112 0.88928
+0.371012 0.265863 -0.029112 0.88928
+0.352829 0.28956 -0.0872114 0.885472
+0.333136 0.312016 -0.144937 0.877872
+0.312016 0.333136 -0.202043 0.866513
+0.28956 0.352829 -0.258283 0.851444
+0.265863 0.371012 -0.313417 0.832728
+0.241029 0.387606 -0.367209 0.810447
+0.215162 0.40254 -0.419428 0.784695
+0.188374 0.415751 -0.469852 0.755583
+0.160779 0.427181 -0.518263 0.723236
+0.132496 0.436782 -0.564456 0.687791
+0.103646 0.444512 -0.608231 0.649401
+0.0743512 0.450339 -0.649401 0.608231
+0.0447384 0.454238 -0.687791 0.564455
+0.0149341 0.456191 -0.723236 0.518263
+-0.0149342 0.456191 -0.755583 0.469852
+-0.0447385 0.454238 -0.784695 0.419428
+-0.0743513 0.450339 -0.810447 0.367209
+-0.103646 0.444512 -0.832728 0.313417
+-0.132496 0.436781 -0.851444 0.258283
+-0.160779 0.427181 -0.866513 0.202042
+-0.188374 0.415751 -0.877872 0.144937
+-0.215162 0.40254 -0.885472 0.0872115
+-0.241029 0.387606 -0.88928 0.029112
+-0.265864 0.371012 -0.88928 -0.0291121
+-0.28956 0.352829 -0.885472 -0.0872114
+-0.312016 0.333136 -0.877872 -0.144937
+-0.333136 0.312016 -0.866513 -0.202043
+-0.352829 0.28956 -0.851444 -0.258283
+-0.371012 0.265864 -0.832728 -0.313417
+-0.387606 0.241029 -0.810447 -0.367209
+-0.40254 0.215162 -0.784695 -0.419428
+-0.415751 0.188374 -0.755583 -0.469852
+-0.427181 0.160779 -0.723236 -0.518263
+-0.436782 0.132496 -0.687791 -0.564456
+-0.444512 0.103646 -0.649401 -0.608231
+-0.450339 0.0743513 -0.608231 -0.649401
+-0.454238 0.0447385 -0.564456 -0.687791
+-0.456191 0.0149342 -0.518263 -0.723236
+0.456191 0.0149342 0.313417 0.832728
+0.454238 0.0447385 0.258283 0.851444
+0.450339 0.0743513 0.202043 0.866513
+0.444512 0.103646 0.144937 0.877872
+0.436782 0.132496 0.0872114 0.885472
+0.427181 0.160779 0.029112 0.88928
+0.415751 0.188374 -0.029112 0.88928
+0.40254 0.215162 -0.0872114 0.885472
+0.387606 0.241029 -0.144937 0.877872
+0.371012 0.265863 -0.202043 0.866513
+0.352829 0.28956 -0.258283 0.851444
+0.333136 0.312016 -0.313417 0.832728
+0.312016 0.333136 -0.367209 0.810447
+0.28956 0.352829 -0.419428 0.784695
+0.265863 0.371012 -0.469852 0.755583
+0.241029 0.387606 -0.518263 0.723236
+0.215162 0.40254 -0.564456 0.687791
+0.188374 0.415751 -0.608231 0.649401
+0.160779 0.427181 -0.649401 0.608231
+0.132496 0.436782 -0.687791 0.564456
+0.103646 0.444512 -0.723236 0.518263
+0.0743512 0.450339 -0.755583 0.469852
+0.0447384 0.454238 -0.784695 0.419428
+0.0149341 0.456191 -0.810447 0.367209
+-0.0149342 0.456191 -0.832728 0.313417
+-0.0447385 0.454238 -0.851444 0.258283
+-0.0743513 0.450339 -0.866513 0.202043
+-0.103646 0.444512 -0.877872 0.144937
+-0.132496 0.436781 -0.885472 0.0872112
+-0.160779 0.427181 -0.88928 0.0291119
+-0.188374 0.415751 -0.88928 -0.0291121
+-0.215162 0.40254 -0.885472 -0.0872113
+-0.241029 0.387606 -0.877872 -0.144937
+-0.265864 0.371012 -0.866513 -0.202043
+-0.28956 0.352829 -0.851444 -0.258283
+-0.312016 0.333136 -0.832728 -0.313417
+-0.333136 0.312016 -0.810447 -0.367209
+-0.352829 0.28956 -0.784695 -0.419428
+-0.371012 0.265864 -0.755583 -0.469852
+-0.387606 0.241029 -0.723236 -0.518263
+-0.40254 0.215162 -0.687791 -0.564455
+-0.415751 0.188374 -0.649401 -0.608231
+-0.427181 0.160779 -0.608231 -0.649401
+-0.436782 0.132496 -0.564456 -0.687791
+-0.444512 0.103646 -0.518263 -0.723236
+-0.450339 0.0743513 -0.469852 -0.755583
+-0.454238 0.0447385 -0.419428 -0.784695
+-0.456191 0.0149342 -0.367209 -0.810447
+0.499732 0.0163595 0.224144 0.836516
+0.497592 0.0490086 0.168953 0.849385
+0.493322 0.0814477 0.113039 0.858616
+0.486938 0.113538 0.0566408 0.864171
+0.47847 0.145142 -6.45453e-10 0.866025
+0.467953 0.176125 -0.0566408 0.864171
+0.455432 0.206354 -0.113039 0.858616
+0.440961 0.235698 -0.168953 0.849385
+0.424601 0.264034 -0.224144 0.836516
+0.406423 0.291239 -0.278375 0.820066
+0.386505 0.317197 -0.331414 0.800103
+0.364932 0.341796 -0.383033 0.776715
+0.341796 0.364932 -0.433013 0.75
+0.317197 0.386505 -0.481138 0.720074
+0.291239 0.406423 -0.527203 0.687064
+0.264034 0.424601 -0.57101 0.651112
+0.235698 0.440961 -0.612372 0.612372
+0.206353 0.455432 -0.651112 0.57101
+0.176125 0.467953 -0.687064 0.527203
+0.145142 0.47847 -0.720074 0.481138
+0.113538 0.486938 -0.75 0.433013
+0.0814477 0.493322 -0.776715 0.383033
+0.0490085 0.497592 -0.800103 0.331413
+0.0163595 0.499732 -0.820066 0.278375
+-0.0163596 0.499732 -0.836516 0.224144
+-0.0490086 0.497592 -0.849385 0.168953
+-0.0814478 0.493322 -0.858616 0.113039
+-0.113538 0.486938 -0.864171 0.0566407
+-0.145142 0.47847 -0.866025 -1.41739e-07
+-0.176125 0.467953 -0.864171 -0.0566409
+-0.206354 0.455432 -0.858616 -0.113039
+-0.235698 0.440961 -0.849385 -0.168953
+-0.264034 0.424601 -0.836516 -0.224144
+-0.291239 0.406423 -0.820066 -0.278375
+-0.317197 0.386505 -0.800103 -0.331414
+-0.341796 0.364932 -0.776715 -0.383033
+-0.364932 0.341796 -0.75 -0.433013
+-0.386505 0.317197 -0.720074 -0.481138
+-0.406423 0.291239 -0.687064 -0.527203
+-0.424601 0.264034 -0.651112 -0.57101
+-0.440961 0.235698 -0.612373 -0.612372
+-0.455432 0.206354 -0.57101 -0.651112
+-0.467953 0.176125 -0.527203 -0.687064
+-0.47847 0.145142 -0.481138 -0.720074
+-0.486938 0.113538 -0.433013 -0.75
+-0.493322 0.0814478 -0.383033 -0.776715
+-0.497592 0.0490085 -0.331414 -0.800103
+-0.499732 0.0163596 -0.278375 -0.820066
+0.499732 0.0163595 0.383033 0.776715
+0.497592 0.0490086 0.331414 0.800103
+0.493322 0.0814477 0.278375 0.820066
+0.486938 0.113538 0.224144 0.836516
+0.47847 0.145142 0.168953 0.849385
+0.467953 0.176125 0.113039 0.858616
+0.455432 0.206354 0.0566408 0.864171
+0.440961 0.235698 -1.82821e-08 0.866025
+0.424601 0.264034 -0.0566408 0.864171
+0.406423 0.291239 -0.113039 0.858616
+0.386505 0.317197 -0.168953 0.849385
+0.364932 0.341796 -0.224144 0.836516
+0.341796 0.364932 -0.278375 0.820066
+0.317197 0.386505 -0.331414 0.800103
+0.291239 0.406423 -0.383033 0.776715
+0.264034 0.424601 -0.433013 0.75
+0.235698 0.440961 -0.481138 0.720074
+0.206353 0.455432 -0.527203 0.687064
+0.176125 0.467953 -0.57101 0.651112
+0.145142 0.47847 -0.612372 0.612372
+0.113538 0.486938 -0.651112 0.57101
+0.0814477 0.493322 -0.687064 0.527203
+0.0490085 0.497592 -0.720074 0.481138
+0.0163595 0.499732 -0.75 0.433013
+-0.0163596 0.499732 -0.776715 0.383033
+-0.0490086 0.497592 -0.800103 0.331414
+-0.0814478 0.493322 -0.820066 0.278375
+-0.113538 0.486938 -0.836516 0.224144
+-0.145142 0.47847 -0.849385 0.168953
+-0.176125 0.467953 -0.858616 0.113039
+-0.206354 0.455432 -0.864171 0.0566407
+-0.235698 0.440961 -0.866025 9.87201e-08
+-0.264034 0.424601 -0.864171 -0.0566408
+-0.291239 0.406423 -0.858616 -0.113039
+-0.317197 0.386505 -0.849385 -0.168953
+-0.341796 0.364932 -0.836516 -0.224144
+-0.364932 0.341796 -0.820066 -0.278375
+-0.386505 0.317197 -0.800103 -0.331414
+-0.406423 0.291239 -0.776715 -0.383033
+-0.424601 0.264034 -0.75 -0.433013
+-0.440961 0.235698 -0.720074 -0.481138
+-0.455432 0.206354 -0.687064 -0.527203
+-0.467953 0.176125 -0.651112 -0.57101
+-0.47847 0.145142 -0.612372 -0.612372
+-0.486938 0.113538 -0.57101 -0.651112
+-0.493322 0.0814478 -0.527203 -0.687064
+-0.497592 0.0490085 -0.481138 -0.720074
+-0.499732 0.0163596 -0.433013 -0.75
+0.539773 0.0176703 0.296463 0.787682
+0.537461 0.0529353 0.244311 0.805385
+0.532848 0.0879736 0.191113 0.81964
+0.525954 0.122635 0.137097 0.830384
+0.516807 0.156772 0.0824937 0.837573
+0.505447 0.190237 0.0275372 0.841175
+0.491923 0.222887 -0.0275372 0.841175
+0.476292 0.254583 -0.0824937 0.837573
+0.458622 0.285189 -0.137097 0.830384
+0.438987 0.314574 -0.191113 0.81964
+0.417473 0.342612 -0.244311 0.805385
+0.394172 0.369182 -0.296463 0.787682
+0.369182 0.394172 -0.347345 0.766606
+0.342612 0.417473 -0.396739 0.742247
+0.314574 0.438987 -0.444435 0.71471
+0.285189 0.458622 -0.490228 0.684112
+0.254583 0.476292 -0.533922 0.650585
+0.222887 0.491923 -0.575329 0.614272
+0.190237 0.505447 -0.614272 0.575329
+0.156772 0.516807 -0.650585 0.533921
+0.122635 0.525954 -0.684112 0.490228
+0.0879735 0.532848 -0.71471 0.444435
+0.0529352 0.537461 -0.742247 0.396739
+0.0176703 0.539773 -0.766606 0.347345
+-0.0176704 0.539773 -0.787682 0.296463
+-0.0529354 0.537461 -0.805385 0.244311
+-0.0879736 0.532848 -0.81964 0.191113
+-0.122635 0.525954 -0.830384 0.137097
+-0.156772 0.516807 -0.837573 0.0824936
+-0.190237 0.505447 -0.841175 0.0275371
+-0.222887 0.491923 -0.841175 -0.0275373
+-0.254583 0.476292 -0.837573 -0.0824936
+-0.285189 0.458622 -0.830384 -0.137097
+-0.314574 0.438987 -0.81964 -0.191113
+-0.342612 0.417473 -0.805385 -0.244311
+-0.369182 0.394172 -0.787682 -0.296463
+-0.394172 0.369182 -0.766606 -0.347345
+-0.417473 0.342612 -0.742247 -0.39674
+-0.438987 0.314574 -0.71471 -0.444435
+-0.458622 0.285189 -0.684112 -0.490228
+-0.476292 0.254583 -0.650585 -0.533921
+-0.491923 0.222887 -0.614272 -0.575329
+-0.505447 0.190237 -0.575329 -0.614272
+-0.516807 0.156772 -0.533922 -0.650585
+-0.525954 0.122635 -0.490228 -0.684112
+-0.532848 0.0879736 -0.444435 -0.71471
+-0.537461 0.0529353 -0.396739 -0.742247
+-0.539773 0.0176704 -0.347345 -0.766606
+0.255019 0.00834844 0.949536 0.182422
+0.253927 0.0250096 0.935572 0.244134
+0.251747 0.0415636 0.917601 0.304801
+0.24849 0.0579397 0.895702 0.364162
+0.244168 0.0740676 0.869967 0.421964
+0.238801 0.0898784 0.840506 0.477959
+0.232412 0.105304 0.807447 0.531908
+0.225027 0.120279 0.770929 0.583578
+0.216678 0.134739 0.731111 0.63275
+0.207402 0.148622 0.688162 0.679212
+0.197238 0.161869 0.642266 0.722766
+0.186229 0.174422 0.593619 0.763225
+0.174422 0.186229 0.542431 0.800415
+0.161869 0.197238 0.48892 0.834178
+0.148622 0.207402 0.433315 0.864369
+0.134739 0.216678 0.375855 0.890858
+0.120279 0.225027 0.316786 0.913533
+0.105304 0.232412 0.256359 0.932296
+0.0898784 0.238801 0.194835 0.947067
+0.0740676 0.244168 0.132477 0.957782
+0.0579397 0.24849 0.0695517 0.964395
+0.0415636 0.251747 0.00632817 0.966879
+0.0250096 0.253927 -0.0569223 0.965223
+0.00834842 0.255019 -0.119929 0.959434
+-0.00834847 0.255019 -0.182422 0.949536
+-0.0250096 0.253927 -0.244134 0.935572
+-0.0415636 0.251747 -0.304801 0.917601
+-0.0579397 0.24849 -0.364162 0.895702
+-0.0740677 0.244168 -0.421964 0.869967
+-0.0898785 0.238801 -0.477959 0.840506
+-0.105304 0.232412 -0.531908 0.807447
+-0.120279 0.225027 -0.583578 0.770929
+-0.134739 0.216678 -0.63275 0.731111
+-0.148622 0.207402 -0.679212 0.688162
+-0.161869 0.197238 -0.722766 0.642266
+-0.174422 0.186229 -0.763225 0.593619
+-0.186229 0.174422 -0.800415 0.542431
+-0.197238 0.161869 -0.834178 0.48892
+-0.207402 0.148622 -0.864369 0.433316
+-0.216678 0.134739 -0.890858 0.375855
+-0.225027 0.120279 -0.913533 0.316786
+-0.232412 0.105304 -0.932296 0.256359
+-0.238801 0.0898784 -0.947067 0.194835
+-0.244168 0.0740676 -0.957782 0.132477
+-0.24849 0.0579397 -0.964395 0.0695516
+-0.251747 0.0415636 -0.966879 0.00632836
+-0.253927 0.0250096 -0.965223 -0.0569222
+-0.255019 0.00834847 -0.959434 -0.119929
+0.306022 0.0100181 0.867117 0.392885
+0.304712 0.0300115 0.839564 0.448756
+0.302097 0.0498763 0.808416 0.502706
+0.298188 0.0695276 0.773807 0.554502
+0.293002 0.0888812 0.735884 0.603924
+0.286561 0.107854 0.69481 0.65076
+0.278894 0.126365 0.65076 0.69481
+0.270032 0.144335 0.603924 0.735884
+0.260014 0.161687 0.554502 0.773807
+0.248882 0.178347 0.502706 0.808416
+0.236685 0.194242 0.448756 0.839564
+0.223474 0.209307 0.392885 0.867117
+0.209307 0.223474 0.335332 0.890956
+0.194242 0.236685 0.276343 0.91098
+0.178347 0.248882 0.21617 0.927103
+0.161687 0.260014 0.155072 0.939256
+0.144335 0.270032 0.0933095 0.947388
+0.126365 0.278894 0.0311476 0.951462
+0.107854 0.286562 -0.0311477 0.951462
+0.0888812 0.293002 -0.0933096 0.947388
+0.0695276 0.298188 -0.155072 0.939256
+0.0498763 0.302097 -0.21617 0.927103
+0.0300115 0.304712 -0.276343 0.91098
+0.0100181 0.306022 -0.335332 0.890956
+-0.0100182 0.306022 -0.392885 0.867116
+-0.0300115 0.304712 -0.448756 0.839564
+-0.0498764 0.302097 -0.502706 0.808416
+-0.0695276 0.298188 -0.554502 0.773807
+-0.0888812 0.293002 -0.603925 0.735884
+-0.107854 0.286561 -0.650761 0.69481
+-0.126365 0.278894 -0.69481 0.65076
+-0.144335 0.270032 -0.735884 0.603924
+-0.161687 0.260014 -0.773807 0.554502
+-0.178347 0.248882 -0.808416 0.502706
+-0.194242 0.236685 -0.839564 0.448756
+-0.209307 0.223474 -0.867117 0.392885
+-0.223474 0.209307 -0.890956 0.335332
+-0.236685 0.194242 -0.91098 0.276343
+-0.248882 0.178347 -0.927103 0.21617
+-0.260014 0.161687 -0.939256 0.155072
+-0.270032 0.144335 -0.947388 0.0933096
+-0.278894 0.126365 -0.951462 0.0311476
+-0.286562 0.107854 -0.951462 -0.0311477
+-0.293002 0.0888812 -0.947388 -0.0933095
+-0.298188 0.0695276 -0.939256 -0.155072
+-0.302097 0.0498764 -0.927103 -0.21617
+-0.304712 0.0300115 -0.91098 -0.276343
+-0.306022 0.0100182 -0.890956 -0.335332
+0.306022 0.0100181 0.939256 0.155072
+0.304712 0.0300115 0.927103 0.21617
+0.302097 0.0498763 0.91098 0.276343
+0.298188 0.0695276 0.890956 0.335332
+0.293002 0.0888812 0.867117 0.392885
+0.286561 0.107854 0.839564 0.448756
+0.278894 0.126365 0.808416 0.502706
+0.270032 0.144335 0.773807 0.554502
+0.260014 0.161687 0.735884 0.603924
+0.248882 0.178347 0.69481 0.65076
+0.236685 0.194242 0.65076 0.69481
+0.223474 0.209307 0.603924 0.735884
+0.209307 0.223474 0.554502 0.773807
+0.194242 0.236685 0.502706 0.808416
+0.178347 0.248882 0.448756 0.839564
+0.161687 0.260014 0.392885 0.867117
+0.144335 0.270032 0.335332 0.890956
+0.126365 0.278894 0.276343 0.91098
+0.107854 0.286562 0.21617 0.927103
+0.0888812 0.293002 0.155072 0.939256
+0.0695276 0.298188 0.0933095 0.947388
+0.0498763 0.302097 0.0311475 0.951462
+0.0300115 0.304712 -0.0311478 0.951462
+0.0100181 0.306022 -0.0933096 0.947388
+-0.0100182 0.306022 -0.155072 0.939256
+-0.0300115 0.304712 -0.21617 0.927103
+-0.0498764 0.302097 -0.276343 0.91098
+-0.0695276 0.298188 -0.335332 0.890956
+-0.0888812 0.293002 -0.392886 0.867116
+-0.107854 0.286561 -0.448756 0.839564
+-0.126365 0.278894 -0.502706 0.808416
+-0.144335 0.270032 -0.554502 0.773807
+-0.161687 0.260014 -0.603924 0.735884
+-0.178347 0.248882 -0.650761 0.69481
+-0.194242 0.236685 -0.69481 0.650761
+-0.209307 0.223474 -0.735884 0.603924
+-0.223474 0.209307 -0.773807 0.554502
+-0.236685 0.194242 -0.808416 0.502706
+-0.248882 0.178347 -0.839564 0.448756
+-0.260014 0.161687 -0.867117 0.392885
+-0.270032 0.144335 -0.890956 0.335332
+-0.278894 0.126365 -0.91098 0.276343
+-0.286562 0.107854 -0.927103 0.21617
+-0.293002 0.0888812 -0.939256 0.155072
+-0.298188 0.0695276 -0.947388 0.0933095
+-0.302097 0.0498764 -0.951462 0.0311477
+-0.304712 0.0300115 -0.951462 -0.0311477
+-0.306022 0.0100182 -0.947388 -0.0933095
+0.357026 0.0116878 0.871042 0.337168
+0.355497 0.0350134 0.847125 0.393415
+0.352446 0.0581891 0.819581 0.447977
+0.347886 0.0811156 0.788527 0.500621
+0.341836 0.103695 0.754096 0.551121
+0.334322 0.12583 0.716437 0.599262
+0.325376 0.147426 0.675709 0.644836
+0.315037 0.168391 0.632088 0.687649
+0.30335 0.188635 0.58576 0.727517
+0.290363 0.208071 0.536924 0.76427
+0.276133 0.226616 0.485789 0.79775
+0.26072 0.244191 0.432574 0.827814
+0.244191 0.26072 0.377506 0.854333
+0.226616 0.276133 0.320821 0.877194
+0.208071 0.290363 0.262763 0.896299
+0.188635 0.30335 0.20358 0.911565
+0.168391 0.315038 0.143525 0.922928
+0.147426 0.325376 0.0828551 0.930339
+0.12583 0.334322 0.0218307 0.933766
+0.103695 0.341836 -0.0392873 0.933195
+0.0811155 0.347886 -0.100237 0.928627
+0.058189 0.352446 -0.160758 0.920083
+0.0350134 0.355497 -0.22059 0.907599
+0.0116878 0.357026 -0.279477 0.891229
+-0.0116879 0.357026 -0.337168 0.871042
+-0.0350134 0.355497 -0.393415 0.847125
+-0.0581891 0.352446 -0.447977 0.819581
+-0.0811156 0.347886 -0.500621 0.788527
+-0.103695 0.341836 -0.551121 0.754096
+-0.12583 0.334322 -0.599262 0.716436
+-0.147426 0.325376 -0.644836 0.675709
+-0.168391 0.315038 -0.687649 0.632088
+-0.188635 0.30335 -0.727517 0.58576
+-0.208071 0.290363 -0.76427 0.536924
+-0.226616 0.276133 -0.79775 0.485789
+-0.244191 0.26072 -0.827814 0.432574
+-0.26072 0.244191 -0.854333 0.377506
+-0.276133 0.226616 -0.877194 0.320821
+-0.290363 0.208071 -0.896299 0.262763
+-0.30335 0.188635 -0.911565 0.20358
+-0.315037 0.168391 -0.922928 0.143525
+-0.325376 0.147426 -0.930339 0.0828552
+-0.334322 0.12583 -0.933766 0.0218306
+-0.341836 0.103695 -0.933195 -0.0392872
+-0.347886 0.0811155 -0.928627 -0.100237
+-0.352446 0.0581891 -0.920083 -0.160757
+-0.355497 0.0350134 -0.907599 -0.22059
+-0.357026 0.0116879 -0.891229 -0.279477
+0.357026 0.0116878 0.774176 0.522539
+0.355497 0.0350134 0.738343 0.572054
+0.352446 0.0581891 0.699348 0.619119
+0.347886 0.0811156 0.657358 0.663533
+0.341836 0.103695 0.612553 0.705106
+0.334322 0.12583 0.565126 0.743659
+0.325376 0.147426 0.515278 0.779028
+0.315037 0.168391 0.463224 0.811061
+0.30335 0.188635 0.409186 0.83962
+0.290363 0.208071 0.353396 0.864585
+0.276133 0.226616 0.296093 0.885847
+0.26072 0.244191 0.237522 0.903316
+0.244191 0.26072 0.177934 0.916916
+0.226616 0.276133 0.117584 0.92659
+0.208071 0.290363 0.05673 0.932297
+0.188635 0.30335 -0.00436661 0.934011
+0.168391 0.315038 -0.0654445 0.931726
+0.147426 0.325376 -0.126242 0.925451
+0.12583 0.334322 -0.186499 0.915212
+0.103695 0.341836 -0.245958 0.901055
+0.0811155 0.347886 -0.304363 0.88304
+0.058189 0.352446 -0.361465 0.861243
+0.0350134 0.355497 -0.417019 0.835758
+0.0116878 0.357026 -0.470787 0.806694
+-0.0116879 0.357026 -0.522539 0.774176
+-0.0350134 0.355497 -0.572054 0.738343
+-0.0581891 0.352446 -0.619119 0.699348
+-0.0811156 0.347886 -0.663533 0.657358
+-0.103695 0.341836 -0.705106 0.612553
+-0.12583 0.334322 -0.743659 0.565126
+-0.147426 0.325376 -0.779028 0.515278
+-0.168391 0.315038 -0.811061 0.463224
+-0.188635 0.30335 -0.83962 0.409186
+-0.208071 0.290363 -0.864585 0.353396
+-0.226616 0.276133 -0.885847 0.296093
+-0.244191 0.26072 -0.903316 0.237522
+-0.26072 0.244191 -0.916916 0.177934
+-0.276133 0.226616 -0.92659 0.117584
+-0.290363 0.208071 -0.932297 0.0567302
+-0.30335 0.188635 -0.934011 -0.00436654
+-0.315037 0.168391 -0.931726 -0.0654443
+-0.325376 0.147426 -0.925451 -0.126242
+-0.334322 0.12583 -0.915212 -0.186499
+-0.341836 0.103695 -0.901055 -0.245958
+-0.347886 0.0811155 -0.88304 -0.304363
+-0.352446 0.0581891 -0.861243 -0.361465
+-0.355497 0.0350134 -0.835758 -0.417019
+-0.357026 0.0116879 -0.806694 -0.470787
+0.40803 0.0133575 0.686333 0.601898
+0.406282 0.0400153 0.645497 0.645497
+0.402795 0.0665018 0.601898 0.686333
+0.397584 0.0927035 0.555721 0.724229
+0.390669 0.118508 0.507164 0.759024
+0.382082 0.143805 0.456435 0.790569
+0.371859 0.168487 0.403752 0.818729
+0.360043 0.192447 0.349341 0.843383
+0.346685 0.215583 0.293433 0.864425
+0.331843 0.237796 0.236268 0.881766
+0.31558 0.25899 0.178092 0.89533
+0.297966 0.279075 0.119154 0.905061
+0.279075 0.297966 0.0597046 0.910916
+0.25899 0.31558 -2.0411e-09 0.912871
+0.237796 0.331843 -0.0597047 0.910916
+0.215583 0.346685 -0.119154 0.905061
+0.192447 0.360043 -0.178092 0.89533
+0.168487 0.371859 -0.236268 0.881766
+0.143805 0.382082 -0.293433 0.864425
+0.118508 0.390669 -0.349341 0.843383
+0.0927035 0.397584 -0.403753 0.818729
+0.0665017 0.402795 -0.456436 0.790569
+0.0400153 0.406282 -0.507164 0.759024
+0.0133575 0.40803 -0.555721 0.724229
+-0.0133575 0.40803 -0.601898 0.686333
+-0.0400154 0.406282 -0.645497 0.645497
+-0.0665018 0.402795 -0.686333 0.601898
+-0.0927035 0.397584 -0.724229 0.555721
+-0.118508 0.390669 -0.759025 0.507164
+-0.143806 0.382082 -0.790569 0.456435
+-0.168487 0.371859 -0.818729 0.403752
+-0.192447 0.360043 -0.843383 0.349341
+-0.215583 0.346685 -0.864425 0.293433
+-0.237796 0.331843 -0.881766 0.236268
+-0.25899 0.31558 -0.89533 0.178092
+-0.279075 0.297966 -0.905061 0.119154
+-0.297966 0.279075 -0.910916 0.0597047
+-0.31558 0.25899 -0.912871 -4.1944e-08
+-0.331843 0.237796 -0.910916 -0.0597045
+-0.346685 0.215583 -0.905061 -0.119154
+-0.360043 0.192447 -0.89533 -0.178092
+-0.371859 0.168487 -0.881766 -0.236268
+-0.382082 0.143805 -0.864425 -0.293433
+-0.390669 0.118508 -0.843383 -0.349341
+-0.397584 0.0927035 -0.818729 -0.403753
+-0.402795 0.0665018 -0.790569 -0.456435
+-0.406282 0.0400153 -0.759024 -0.507164
+-0.40803 0.0133575 -0.724229 -0.555721
+0.40803 0.0133575 0.790569 0.456435
+0.406282 0.0400153 0.759024 0.507164
+0.402795 0.0665018 0.724229 0.555721
+0.397584 0.0927035 0.686333 0.601898
+0.390669 0.118508 0.645497 0.645497
+0.382082 0.143805 0.601898 0.686333
+0.371859 0.168487 0.555721 0.724229
+0.360043 0.192447 0.507164 0.759024
+0.346685 0.215583 0.456435 0.790569
+0.331843 0.237796 0.403752 0.818729
+0.31558 0.25899 0.349341 0.843383
+0.297966 0.279075 0.293433 0.864425
+0.279075 0.297966 0.236268 0.881766
+0.25899 0.31558 0.178092 0.89533
+0.237796 0.331843 0.119154 0.905061
+0.215583 0.346685 0.0597045 0.910916
+0.192447 0.360043 -7.50431e-08 0.912871
+0.168487 0.371859 -0.0597047 0.910916
+0.143805 0.382082 -0.119154 0.905061
+0.118508 0.390669 -0.178092 0.89533
+0.0927035 0.397584 -0.236268 0.881766
+0.0665017 0.402795 -0.293433 0.864425
+0.0400153 0.406282 -0.349341 0.843383
+0.0133575 0.40803 -0.403753 0.818729
+-0.0133575 0.40803 -0.456436 0.790569
+-0.0400154 0.406282 -0.507164 0.759024
+-0.0665018 0.402795 -0.555721 0.724229
+-0.0927035 0.397584 -0.601898 0.686333
+-0.118508 0.390669 -0.645497 0.645497
+-0.143806 0.382082 -0.686333 0.601898
+-0.168487 0.371859 -0.724229 0.555721
+-0.192447 0.360043 -0.759024 0.507164
+-0.215583 0.346685 -0.790569 0.456435
+-0.237796 0.331843 -0.818729 0.403752
+-0.25899 0.31558 -0.843383 0.349341
+-0.279075 0.297966 -0.864425 0.293433
+-0.297966 0.279075 -0.881766 0.236268
+-0.31558 0.25899 -0.89533 0.178092
+-0.331843 0.237796 -0.905061 0.119154
+-0.346685 0.215583 -0.910916 0.0597046
+-0.360043 0.192447 -0.912871 1.02699e-07
+-0.371859 0.168487 -0.910916 -0.0597046
+-0.382082 0.143805 -0.905061 -0.119154
+-0.390669 0.118508 -0.89533 -0.178092
+-0.397584 0.0927035 -0.881766 -0.236268
+-0.402795 0.0665018 -0.864425 -0.293433
+-0.406282 0.0400153 -0.843383 -0.349341
+-0.40803 0.0133575 -0.818729 -0.403752
+0.456191 0.0149342 0.723236 0.518263
+0.454238 0.0447385 0.687791 0.564456
+0.450339 0.0743513 0.649401 0.608231
+0.444512 0.103646 0.608231 0.649401
+0.436782 0.132496 0.564456 0.687791
+0.427181 0.160779 0.518263 0.723236
+0.415751 0.188374 0.469852 0.755583
+0.40254 0.215162 0.419428 0.784695
+0.387606 0.241029 0.367209 0.810447
+0.371012 0.265863 0.313417 0.832728
+0.352829 0.28956 0.258283 0.851444
+0.333136 0.312016 0.202043 0.866513
+0.312016 0.333136 0.144937 0.877872
+0.28956 0.352829 0.0872114 0.885472
+0.265863 0.371012 0.029112 0.88928
+0.241029 0.387606 -0.0291121 0.88928
+0.215162 0.40254 -0.0872115 0.885472
+0.188374 0.415751 -0.144937 0.877872
+0.160779 0.427181 -0.202043 0.866513
+0.132496 0.436782 -0.258283 0.851444
+0.103646 0.444512 -0.313417 0.832728
+0.0743512 0.450339 -0.367209 0.810447
+0.0447384 0.454238 -0.419428 0.784695
+0.0149341 0.456191 -0.469852 0.755583
+-0.0149342 0.456191 -0.518263 0.723236
+-0.0447385 0.454238 -0.564456 0.687791
+-0.0743513 0.450339 -0.608231 0.649401
+-0.103646 0.444512 -0.649401 0.608231
+-0.132496 0.436781 -0.687791 0.564455
+-0.160779 0.427181 -0.723236 0.518263
+-0.188374 0.415751 -0.755583 0.469852
+-0.215162 0.40254 -0.784695 0.419428
+-0.241029 0.387606 -0.810447 0.367209
+-0.265864 0.371012 -0.832728 0.313417
+-0.28956 0.352829 -0.851444 0.258283
+-0.312016 0.333136 -0.866513 0.202043
+-0.333136 0.312016 -0.877872 0.144937
+-0.352829 0.28956 -0.885472 0.0872113
+-0.371012 0.265864 -0.88928 0.0291121
+-0.387606 0.241029 -0.88928 -0.029112
+-0.40254 0.215162 -0.885472 -0.0872113
+-0.415751 0.188374 -0.877872 -0.144937
+-0.427181 0.160779 -0.866513 -0.202043
+-0.436782 0.132496 -0.851444 -0.258283
+-0.444512 0.103646 -0.832728 -0.313417
+-0.450339 0.0743513 -0.810447 -0.367209
+-0.454238 0.0447385 -0.784695 -0.419428
+-0.456191 0.0149342 -0.755583 -0.469852
+0.357026 0.0116878 0.92423 0.134889
+0.355497 0.0350134 0.913429 0.195048
+0.352446 0.0581891 0.898716 0.254372
+0.347886 0.0811156 0.880155 0.312606
+0.341836 0.103695 0.857825 0.369501
+0.334322 0.12583 0.831822 0.424815
+0.325376 0.147426 0.802257 0.478309
+0.315037 0.168391 0.769256 0.529755
+0.30335 0.188635 0.732962 0.578933
+0.290363 0.208071 0.693528 0.625631
+0.276133 0.226616 0.651125 0.66965
+0.26072 0.244191 0.605934 0.710802
+0.244191 0.26072 0.558148 0.74891
+0.226616 0.276133 0.507972 0.783811
+0.208071 0.290363 0.45562 0.815356
+0.188635 0.30335 0.401318 0.84341
+0.168391 0.315038 0.345297 0.867851
+0.147426 0.325376 0.287798 0.888577
+0.12583 0.334322 0.229066 0.905497
+0.103695 0.341836 0.169353 0.91854
+0.0811155 0.347886 0.108915 0.927649
+0.058189 0.352446 0.0480106 0.932787
+0.0350134 0.355497 -0.0130993 0.933929
+0.0116878 0.357026 -0.0741532 0.931073
+-0.0116879 0.357026 -0.134889 0.92423
+-0.0350134 0.355497 -0.195048 0.913429
+-0.0581891 0.352446 -0.254372 0.898716
+-0.0811156 0.347886 -0.312606 0.880155
+-0.103695 0.341836 -0.369502 0.857825
+-0.12583 0.334322 -0.424815 0.831822
+-0.147426 0.325376 -0.478309 0.802257
+-0.168391 0.315038 -0.529755 0.769257
+-0.188635 0.30335 -0.578933 0.732962
+-0.208071 0.290363 -0.625631 0.693528
+-0.226616 0.276133 -0.66965 0.651125
+-0.244191 0.26072 -0.710802 0.605934
+-0.26072 0.244191 -0.74891 0.558148
+-0.276133 0.226616 -0.783812 0.507972
+-0.290363 0.208071 -0.815356 0.455621
+-0.30335 0.188635 -0.84341 0.401318
+-0.315037 0.168391 -0.867851 0.345297
+-0.325376 0.147426 -0.888577 0.287798
+-0.334322 0.12583 -0.905497 0.229066
+-0.341836 0.103695 -0.91854 0.169353
+-0.347886 0.0811155 -0.927649 0.108915
+-0.352446 0.0581891 -0.932787 0.0480108
+-0.355497 0.0350134 -0.933929 -0.0130992
+-0.357026 0.0116879 -0.931073 -0.074153
+0.40803 0.0133575 0.864425 0.293433
+0.406282 0.0400153 0.843383 0.349341
+0.402795 0.0665018 0.818729 0.403752
+0.397584 0.0927035 0.790569 0.456435
+0.390669 0.118508 0.759024 0.507164
+0.382082 0.143805 0.724229 0.555721
+0.371859 0.168487 0.686333 0.601898
+0.360043 0.192447 0.645497 0.645497
+0.346685 0.215583 0.601898 0.686333
+0.331843 0.237796 0.555721 0.724229
+0.31558 0.25899 0.507164 0.759024
+0.297966 0.279075 0.456435 0.790569
+0.279075 0.297966 0.403752 0.818729
+0.25899 0.31558 0.349341 0.843383
+0.237796 0.331843 0.293433 0.864425
+0.215583 0.346685 0.236268 0.881766
+0.192447 0.360043 0.178092 0.89533
+0.168487 0.371859 0.119154 0.905061
+0.143805 0.382082 0.0597046 0.910916
+0.118508 0.390669 -3.92225e-08 0.912871
+0.0927035 0.397584 -0.0597046 0.910916
+0.0665017 0.402795 -0.119154 0.905061
+0.0400153 0.406282 -0.178092 0.89533
+0.0133575 0.40803 -0.236268 0.881766
+-0.0133575 0.40803 -0.293433 0.864425
+-0.0400154 0.406282 -0.349341 0.843383
+-0.0665018 0.402795 -0.403753 0.818729
+-0.0927035 0.397584 -0.456436 0.790569
+-0.118508 0.390669 -0.507164 0.759024
+-0.143806 0.382082 -0.555721 0.724229
+-0.168487 0.371859 -0.601898 0.686333
+-0.192447 0.360043 -0.645497 0.645497
+-0.215583 0.346685 -0.686333 0.601898
+-0.237796 0.331843 -0.724229 0.555721
+-0.25899 0.31558 -0.759024 0.507164
+-0.279075 0.297966 -0.790569 0.456435
+-0.297966 0.279075 -0.818729 0.403753
+-0.31558 0.25899 -0.843383 0.349341
+-0.331843 0.237796 -0.864425 0.293433
+-0.346685 0.215583 -0.881766 0.236268
+-0.360043 0.192447 -0.89533 0.178092
+-0.371859 0.168487 -0.905061 0.119154
+-0.382082 0.143805 -0.910916 0.0597045
+-0.390669 0.118508 -0.912871 2.96973e-08
+-0.397584 0.0927035 -0.910916 -0.0597047
+-0.402795 0.0665018 -0.905061 -0.119154
+-0.406282 0.0400153 -0.89533 -0.178092
+-0.40803 0.0133575 -0.881766 -0.236268
+0.40803 0.0133575 0.905061 0.119154
+0.406282 0.0400153 0.89533 0.178092
+0.402795 0.0665018 0.881766 0.236268
+0.397584 0.0927035 0.864425 0.293433
+0.390669 0.118508 0.843383 0.349341
+0.382082 0.143805 0.818729 0.403752
+0.371859 0.168487 0.790569 0.456435
+0.360043 0.192447 0.759024 0.507164
+0.346685 0.215583 0.724229 0.555721
+0.331843 0.237796 0.686333 0.601898
+0.31558 0.25899 0.645497 0.645497
+0.297966 0.279075 0.601898 0.686333
+0.279075 0.297966 0.555721 0.724229
+0.25899 0.31558 0.507164 0.759024
+0.237796 0.331843 0.456435 0.790569
+0.215583 0.346685 0.403752 0.818729
+0.192447 0.360043 0.349341 0.843383
+0.168487 0.371859 0.293433 0.864425
+0.143805 0.382082 0.236268 0.881766
+0.118508 0.390669 0.178092 0.89533
+0.0927035 0.397584 0.119154 0.905061
+0.0665017 0.402795 0.0597045 0.910916
+0.0400153 0.406282 -1.12225e-07 0.912871
+0.0133575 0.40803 -0.0597047 0.910916
+-0.0133575 0.40803 -0.119154 0.905061
+-0.0400154 0.406282 -0.178092 0.89533
+-0.0665018 0.402795 -0.236268 0.881766
+-0.0927035 0.397584 -0.293433 0.864425
+-0.118508 0.390669 -0.349341 0.843383
+-0.143806 0.382082 -0.403753 0.818729
+-0.168487 0.371859 -0.456436 0.790569
+-0.192447 0.360043 -0.507164 0.759024
+-0.215583 0.346685 -0.555721 0.724229
+-0.237796 0.331843 -0.601898 0.686333
+-0.25899 0.31558 -0.645497 0.645497
+-0.279075 0.297966 -0.686333 0.601898
+-0.297966 0.279075 -0.724229 0.555721
+-0.31558 0.25899 -0.759024 0.507164
+-0.331843 0.237796 -0.790569 0.456436
+-0.346685 0.215583 -0.818729 0.403752
+-0.360043 0.192447 -0.843383 0.349341
+-0.371859 0.168487 -0.864425 0.293433
+-0.382082 0.143805 -0.881766 0.236268
+-0.390669 0.118508 -0.89533 0.178092
+-0.397584 0.0927035 -0.905061 0.119153
+-0.402795 0.0665018 -0.910916 0.0597047
+-0.406282 0.0400153 -0.912871 -4.33047e-08
+-0.40803 0.0133575 -0.910916 -0.0597045
+0.456191 0.0149342 0.866513 0.202043
+0.454238 0.0447385 0.851444 0.258283
+0.450339 0.0743513 0.832728 0.313417
+0.444512 0.103646 0.810447 0.367209
+0.436782 0.132496 0.784695 0.419428
+0.427181 0.160779 0.755583 0.469852
+0.415751 0.188374 0.723236 0.518263
+0.40254 0.215162 0.687791 0.564456
+0.387606 0.241029 0.649401 0.608231
+0.371012 0.265863 0.608231 0.649401
+0.352829 0.28956 0.564456 0.687791
+0.333136 0.312016 0.518263 0.723236
+0.312016 0.333136 0.469852 0.755583
+0.28956 0.352829 0.419428 0.784695
+0.265863 0.371012 0.367209 0.810447
+0.241029 0.387606 0.313417 0.832728
+0.215162 0.40254 0.258283 0.851444
+0.188374 0.415751 0.202043 0.866513
+0.160779 0.427181 0.144937 0.877872
+0.132496 0.436782 0.0872114 0.885472
+0.103646 0.444512 0.029112 0.88928
+0.0743512 0.450339 -0.0291121 0.88928
+0.0447384 0.454238 -0.0872115 0.885472
+0.0149341 0.456191 -0.144937 0.877872
+-0.0149342 0.456191 -0.202043 0.866513
+-0.0447385 0.454238 -0.258283 0.851444
+-0.0743513 0.450339 -0.313417 0.832728
+-0.103646 0.444512 -0.367209 0.810447
+-0.132496 0.436781 -0.419428 0.784695
+-0.160779 0.427181 -0.469852 0.755583
+-0.188374 0.415751 -0.518263 0.723236
+-0.215162 0.40254 -0.564455 0.687791
+-0.241029 0.387606 -0.608231 0.649401
+-0.265864 0.371012 -0.649401 0.608231
+-0.28956 0.352829 -0.687791 0.564456
+-0.312016 0.333136 -0.723236 0.518263
+-0.333136 0.312016 -0.755583 0.469852
+-0.352829 0.28956 -0.784695 0.419428
+-0.371012 0.265864 -0.810447 0.367209
+-0.387606 0.241029 -0.832728 0.313417
+-0.40254 0.215162 -0.851444 0.258283
+-0.415751 0.188374 -0.866513 0.202043
+-0.427181 0.160779 -0.877872 0.144937
+-0.436782 0.132496 -0.885472 0.0872114
+-0.444512 0.103646 -0.88928 0.029112
+-0.450339 0.0743513 -0.88928 -0.029112
+-0.454238 0.0447385 -0.885472 -0.0872114
+-0.456191 0.0149342 -0.877872 -0.144937
+0.456191 0.0149342 0.810447 0.367209
+0.454238 0.0447385 0.784695 0.419428
+0.450339 0.0743513 0.755583 0.469852
+0.444512 0.103646 0.723236 0.518263
+0.436782 0.132496 0.687791 0.564456
+0.427181 0.160779 0.649401 0.608231
+0.415751 0.188374 0.608231 0.649401
+0.40254 0.215162 0.564456 0.687791
+0.387606 0.241029 0.518263 0.723236
+0.371012 0.265863 0.469852 0.755583
+0.352829 0.28956 0.419428 0.784695
+0.333136 0.312016 0.367209 0.810447
+0.312016 0.333136 0.313417 0.832728
+0.28956 0.352829 0.258283 0.851444
+0.265863 0.371012 0.202043 0.866513
+0.241029 0.387606 0.144937 0.877872
+0.215162 0.40254 0.0872113 0.885472
+0.188374 0.415751 0.029112 0.88928
+0.160779 0.427181 -0.0291121 0.88928
+0.132496 0.436782 -0.0872114 0.885472
+0.103646 0.444512 -0.144937 0.877872
+0.0743512 0.450339 -0.202043 0.866513
+0.0447384 0.454238 -0.258283 0.851444
+0.0149341 0.456191 -0.313417 0.832728
+-0.0149342 0.456191 -0.367209 0.810447
+-0.0447385 0.454238 -0.419428 0.784695
+-0.0743513 0.450339 -0.469852 0.755583
+-0.103646 0.444512 -0.518263 0.723236
+-0.132496 0.436781 -0.564456 0.687791
+-0.160779 0.427181 -0.608231 0.649401
+-0.188374 0.415751 -0.649401 0.608231
+-0.215162 0.40254 -0.687791 0.564456
+-0.241029 0.387606 -0.723236 0.518263
+-0.265864 0.371012 -0.755583 0.469852
+-0.28956 0.352829 -0.784695 0.419428
+-0.312016 0.333136 -0.810447 0.367209
+-0.333136 0.312016 -0.832728 0.313417
+-0.352829 0.28956 -0.851444 0.258283
+-0.371012 0.265864 -0.866513 0.202043
+-0.387606 0.241029 -0.877872 0.144937
+-0.40254 0.215162 -0.885472 0.0872115
+-0.415751 0.188374 -0.88928 0.029112
+-0.427181 0.160779 -0.88928 -0.0291121
+-0.436782 0.132496 -0.885472 -0.0872114
+-0.444512 0.103646 -0.877872 -0.144937
+-0.450339 0.0743513 -0.866513 -0.202043
+-0.454238 0.0447385 -0.851444 -0.258283
+-0.456191 0.0149342 -0.832728 -0.313417
+0.499732 0.0163595 0.75 0.433013
+0.497592 0.0490086 0.720074 0.481138
+0.493322 0.0814477 0.687064 0.527203
+0.486938 0.113538 0.651112 0.57101
+0.47847 0.145142 0.612372 0.612372
+0.467953 0.176125 0.57101 0.651112
+0.455432 0.206354 0.527203 0.687064
+0.440961 0.235698 0.481138 0.720074
+0.424601 0.264034 0.433013 0.75
+0.406423 0.291239 0.383033 0.776715
+0.386505 0.317197 0.331414 0.800103
+0.364932 0.341796 0.278375 0.820066
+0.341796 0.364932 0.224144 0.836516
+0.317197 0.386505 0.168953 0.849385
+0.291239 0.406423 0.113039 0.858616
+0.264034 0.424601 0.0566407 0.864171
+0.235698 0.440961 -7.11922e-08 0.866025
+0.206353 0.455432 -0.0566408 0.864171
+0.176125 0.467953 -0.113039 0.858616
+0.145142 0.47847 -0.168953 0.849385
+0.113538 0.486938 -0.224144 0.836516
+0.0814477 0.493322 -0.278375 0.820066
+0.0490085 0.497592 -0.331414 0.800103
+0.0163595 0.499732 -0.383033 0.776715
+-0.0163596 0.499732 -0.433013 0.75
+-0.0490086 0.497592 -0.481138 0.720074
+-0.0814478 0.493322 -0.527203 0.687064
+-0.113538 0.486938 -0.57101 0.651112
+-0.145142 0.47847 -0.612373 0.612372
+-0.176125 0.467953 -0.651112 0.57101
+-0.206354 0.455432 -0.687064 0.527203
+-0.235698 0.440961 -0.720074 0.481138
+-0.264034 0.424601 -0.75 0.433013
+-0.291239 0.406423 -0.776715 0.383033
+-0.317197 0.386505 -0.800103 0.331414
+-0.341796 0.364932 -0.820066 0.278375
+-0.364932 0.341796 -0.836516 0.224144
+-0.386505 0.317197 -0.849385 0.168953
+-0.406423 0.291239 -0.858616 0.113039
+-0.424601 0.264034 -0.864171 0.0566408
+-0.440961 0.235698 -0.866025 9.74292e-08
+-0.455432 0.206354 -0.864171 -0.0566408
+-0.467953 0.176125 -0.858616 -0.113039
+-0.47847 0.145142 -0.849385 -0.168953
+-0.486938 0.113538 -0.836516 -0.224144
+-0.493322 0.0814478 -0.820066 -0.278375
+-0.497592 0.0490085 -0.800103 -0.331414
+-0.499732 0.0163596 -0.776715 -0.383033
+0.499732 0.0163595 0.820066 0.278375
+0.497592 0.0490086 0.800103 0.331414
+0.493322 0.0814477 0.776715 0.383033
+0.486938 0.113538 0.75 0.433013
+0.47847 0.145142 0.720074 0.481138
+0.467953 0.176125 0.687064 0.527203
+0.455432 0.206354 0.651112 0.57101
+0.440961 0.235698 0.612372 0.612372
+0.424601 0.264034 0.57101 0.651112
+0.406423 0.291239 0.527203 0.687064
+0.386505 0.317197 0.481138 0.720074
+0.364932 0.341796 0.433013 0.75
+0.341796 0.364932 0.383033 0.776715
+0.317197 0.386505 0.331414 0.800103
+0.291239 0.406423 0.278375 0.820066
+0.264034 0.424601 0.224144 0.836516
+0.235698 0.440961 0.168953 0.849385
+0.206353 0.455432 0.113039 0.858616
+0.176125 0.467953 0.0566407 0.864171
+0.145142 0.47847 -3.72097e-08 0.866025
+0.113538 0.486938 -0.0566408 0.864171
+0.0814477 0.493322 -0.113039 0.858616
+0.0490085 0.497592 -0.168953 0.849385
+0.0163595 0.499732 -0.224144 0.836516
+-0.0163596 0.499732 -0.278375 0.820066
+-0.0490086 0.497592 -0.331414 0.800103
+-0.0814478 0.493322 -0.383033 0.776715
+-0.113538 0.486938 -0.433013 0.75
+-0.145142 0.47847 -0.481138 0.720074
+-0.176125 0.467953 -0.527203 0.687064
+-0.206354 0.455432 -0.57101 0.651112
+-0.235698 0.440961 -0.612372 0.612373
+-0.264034 0.424601 -0.651112 0.57101
+-0.291239 0.406423 -0.687064 0.527203
+-0.317197 0.386505 -0.720074 0.481138
+-0.341796 0.364932 -0.75 0.433013
+-0.364932 0.341796 -0.776715 0.383033
+-0.386505 0.317197 -0.800103 0.331414
+-0.406423 0.291239 -0.820066 0.278375
+-0.424601 0.264034 -0.836516 0.224144
+-0.440961 0.235698 -0.849385 0.168953
+-0.455432 0.206354 -0.858616 0.113039
+-0.467953 0.176125 -0.864171 0.0566407
+-0.47847 0.145142 -0.866025 2.81734e-08
+-0.486938 0.113538 -0.864171 -0.0566408
+-0.493322 0.0814478 -0.858616 -0.113039
+-0.497592 0.0490085 -0.849385 -0.168953
+-0.499732 0.0163596 -0.836516 -0.224144
+0.539773 0.0176703 0.766606 0.347345
+0.537461 0.0529353 0.742247 0.396739
+0.532848 0.0879736 0.71471 0.444435
+0.525954 0.122635 0.684112 0.490228
+0.516807 0.156772 0.650585 0.533922
+0.505447 0.190237 0.614272 0.575329
+0.491923 0.222887 0.575329 0.614272
+0.476292 0.254583 0.533921 0.650585
+0.458622 0.285189 0.490228 0.684112
+0.438987 0.314574 0.444435 0.71471
+0.417473 0.342612 0.396739 0.742247
+0.394172 0.369182 0.347345 0.766606
+0.369182 0.394172 0.296463 0.787682
+0.342612 0.417473 0.244311 0.805385
+0.314574 0.438987 0.191113 0.81964
+0.285189 0.458622 0.137097 0.830384
+0.254583 0.476292 0.0824936 0.837573
+0.222887 0.491923 0.0275372 0.841175
+0.190237 0.505447 -0.0275373 0.841175
+0.156772 0.516807 -0.0824938 0.837573
+0.122635 0.525954 -0.137097 0.830384
+0.0879735 0.532848 -0.191113 0.81964
+0.0529352 0.537461 -0.244311 0.805385
+0.0176703 0.539773 -0.296463 0.787682
+-0.0176704 0.539773 -0.347345 0.766606
+-0.0529354 0.537461 -0.39674 0.742247
+-0.0879736 0.532848 -0.444435 0.71471
+-0.122635 0.525954 -0.490228 0.684112
+-0.156772 0.516807 -0.533922 0.650585
+-0.190237 0.505447 -0.575329 0.614272
+-0.222887 0.491923 -0.614272 0.575329
+-0.254583 0.476292 -0.650585 0.533922
+-0.285189 0.458622 -0.684112 0.490228
+-0.314574 0.438987 -0.71471 0.444435
+-0.342612 0.417473 -0.742247 0.396739
+-0.369182 0.394172 -0.766606 0.347345
+-0.394172 0.369182 -0.787682 0.296463
+-0.417473 0.342612 -0.805385 0.244311
+-0.438987 0.314574 -0.81964 0.191113
+-0.458622 0.285189 -0.830384 0.137097
+-0.476292 0.254583 -0.837573 0.0824938
+-0.491923 0.222887 -0.841175 0.0275372
+-0.505447 0.190237 -0.841175 -0.0275373
+-0.516807 0.156772 -0.837573 -0.0824937
+-0.525954 0.122635 -0.830384 -0.137097
+-0.532848 0.0879736 -0.81964 -0.191113
+-0.537461 0.0529353 -0.805385 -0.244311
+-0.539773 0.0176704 -0.787682 -0.296463
+0.456191 0.0149342 0.608231 0.649401
+0.454238 0.0447385 0.564456 0.687791
+0.450339 0.0743513 0.518263 0.723236
+0.444512 0.103646 0.469852 0.755583
+0.436782 0.132496 0.419428 0.784695
+0.427181 0.160779 0.367209 0.810447
+0.415751 0.188374 0.313417 0.832728
+0.40254 0.215162 0.258283 0.851444
+0.387606 0.241029 0.202043 0.866513
+0.371012 0.265863 0.144937 0.877872
+0.352829 0.28956 0.0872114 0.885472
+0.333136 0.312016 0.029112 0.88928
+0.312016 0.333136 -0.029112 0.88928
+0.28956 0.352829 -0.0872114 0.885472
+0.265863 0.371012 -0.144937 0.877872
+0.241029 0.387606 -0.202043 0.866513
+0.215162 0.40254 -0.258283 0.851444
+0.188374 0.415751 -0.313417 0.832728
+0.160779 0.427181 -0.367209 0.810447
+0.132496 0.436782 -0.419428 0.784695
+0.103646 0.444512 -0.469852 0.755583
+0.0743512 0.450339 -0.518263 0.723236
+0.0447384 0.454238 -0.564456 0.687791
+0.0149341 0.456191 -0.608231 0.649401
+-0.0149342 0.456191 -0.649401 0.608231
+-0.0447385 0.454238 -0.687791 0.564456
+-0.0743513 0.450339 -0.723236 0.518263
+-0.103646 0.444512 -0.755583 0.469852
+-0.132496 0.436781 -0.784695 0.419428
+-0.160779 0.427181 -0.810447 0.367209
+-0.188374 0.415751 -0.832728 0.313417
+-0.215162 0.40254 -0.851444 0.258283
+-0.241029 0.387606 -0.866513 0.202043
+-0.265864 0.371012 -0.877872 0.144937
+-0.28956 0.352829 -0.885472 0.0872114
+-0.312016 0.333136 -0.88928 0.029112
+-0.333136 0.312016 -0.88928 -0.029112
+-0.352829 0.28956 -0.885472 -0.0872114
+-0.371012 0.265864 -0.877872 -0.144937
+-0.387606 0.241029 -0.866513 -0.202043
+-0.40254 0.215162 -0.851444 -0.258283
+-0.415751 0.188374 -0.832728 -0.313417
+-0.427181 0.160779 -0.810447 -0.367209
+-0.436782 0.132496 -0.784695 -0.419428
+-0.444512 0.103646 -0.755583 -0.469852
+-0.450339 0.0743513 -0.723236 -0.518263
+-0.454238 0.0447385 -0.687791 -0.564456
+-0.456191 0.0149342 -0.649401 -0.608231
+0.499732 0.0163595 0.527203 0.687064
+0.497592 0.0490086 0.481138 0.720074
+0.493322 0.0814477 0.433013 0.75
+0.486938 0.113538 0.383033 0.776715
+0.47847 0.145142 0.331414 0.800103
+0.467953 0.176125 0.278375 0.820066
+0.455432 0.206354 0.224144 0.836516
+0.440961 0.235698 0.168953 0.849385
+0.424601 0.264034 0.113039 0.858616
+0.406423 0.291239 0.0566408 0.864171
+0.386505 0.317197 1.57003e-08 0.866025
+0.364932 0.341796 -0.0566408 0.864171
+0.341796 0.364932 -0.113039 0.858616
+0.317197 0.386505 -0.168953 0.849385
+0.291239 0.406423 -0.224144 0.836516
+0.264034 0.424601 -0.278375 0.820066
+0.235698 0.440961 -0.331414 0.800103
+0.206353 0.455432 -0.383033 0.776715
+0.176125 0.467953 -0.433013 0.75
+0.145142 0.47847 -0.481138 0.720074
+0.113538 0.486938 -0.527203 0.687064
+0.0814477 0.493322 -0.57101 0.651112
+0.0490085 0.497592 -0.612373 0.612372
+0.0163595 0.499732 -0.651112 0.57101
+-0.0163596 0.499732 -0.687064 0.527203
+-0.0490086 0.497592 -0.720074 0.481138
+-0.0814478 0.493322 -0.75 0.433013
+-0.113538 0.486938 -0.776715 0.383033
+-0.145142 0.47847 -0.800103 0.331413
+-0.176125 0.467953 -0.820066 0.278375
+-0.206354 0.455432 -0.836516 0.224144
+-0.235698 0.440961 -0.849385 0.168953
+-0.264034 0.424601 -0.858616 0.113039
+-0.291239 0.406423 -0.864171 0.0566407
+-0.317197 0.386505 -0.866025 2.94643e-08
+-0.341796 0.364932 -0.864171 -0.0566408
+-0.364932 0.341796 -0.858616 -0.113039
+-0.386505 0.317197 -0.849385 -0.168953
+-0.406423 0.291239 -0.836516 -0.224144
+-0.424601 0.264034 -0.820066 -0.278375
+-0.440961 0.235698 -0.800103 -0.331413
+-0.455432 0.206354 -0.776715 -0.383033
+-0.467953 0.176125 -0.75 -0.433013
+-0.47847 0.145142 -0.720074 -0.481138
+-0.486938 0.113538 -0.687064 -0.527203
+-0.493322 0.0814478 -0.651112 -0.57101
+-0.497592 0.0490085 -0.612372 -0.612372
+-0.499732 0.0163596 -0.57101 -0.651112
+0.499732 0.0163595 0.651112 0.57101
+0.497592 0.0490086 0.612372 0.612372
+0.493322 0.0814477 0.57101 0.651112
+0.486938 0.113538 0.527203 0.687064
+0.47847 0.145142 0.481138 0.720074
+0.467953 0.176125 0.433013 0.75
+0.455432 0.206354 0.383033 0.776715
+0.440961 0.235698 0.331414 0.800103
+0.424601 0.264034 0.278375 0.820066
+0.406423 0.291239 0.224144 0.836516
+0.386505 0.317197 0.168953 0.849385
+0.364932 0.341796 0.113039 0.858616
+0.341796 0.364932 0.0566408 0.864171
+0.317197 0.386505 -1.93636e-09 0.866025
+0.291239 0.406423 -0.0566408 0.864171
+0.264034 0.424601 -0.113039 0.858616
+0.235698 0.440961 -0.168953 0.849385
+0.206353 0.455432 -0.224144 0.836516
+0.176125 0.467953 -0.278375 0.820066
+0.145142 0.47847 -0.331414 0.800103
+0.113538 0.486938 -0.383033 0.776715
+0.0814477 0.493322 -0.433013 0.75
+0.0490085 0.497592 -0.481138 0.720074
+0.0163595 0.499732 -0.527203 0.687064
+-0.0163596 0.499732 -0.57101 0.651112
+-0.0490086 0.497592 -0.612372 0.612372
+-0.0814478 0.493322 -0.651112 0.57101
+-0.113538 0.486938 -0.687064 0.527203
+-0.145142 0.47847 -0.720074 0.481138
+-0.176125 0.467953 -0.75 0.433013
+-0.206354 0.455432 -0.776715 0.383033
+-0.235698 0.440961 -0.800103 0.331414
+-0.264034 0.424601 -0.820066 0.278375
+-0.291239 0.406423 -0.836516 0.224144
+-0.317197 0.386505 -0.849385 0.168953
+-0.341796 0.364932 -0.858616 0.113039
+-0.364932 0.341796 -0.864171 0.0566408
+-0.386505 0.317197 -0.866025 -3.97915e-08
+-0.406423 0.291239 -0.864171 -0.0566407
+-0.424601 0.264034 -0.858616 -0.113039
+-0.440961 0.235698 -0.849385 -0.168953
+-0.455432 0.206354 -0.836516 -0.224144
+-0.467953 0.176125 -0.820066 -0.278375
+-0.47847 0.145142 -0.800103 -0.331414
+-0.486938 0.113538 -0.776715 -0.383033
+-0.493322 0.0814478 -0.75 -0.433013
+-0.497592 0.0490085 -0.720074 -0.481138
+-0.499732 0.0163596 -0.687064 -0.527203
+0.539773 0.0176703 0.575329 0.614272
+0.537461 0.0529353 0.533922 0.650585
+0.532848 0.0879736 0.490228 0.684112
+0.525954 0.122635 0.444435 0.71471
+0.516807 0.156772 0.396739 0.742247
+0.505447 0.190237 0.347345 0.766606
+0.491923 0.222887 0.296463 0.787682
+0.476292 0.254583 0.244311 0.805385
+0.458622 0.285189 0.191113 0.81964
+0.438987 0.314574 0.137097 0.830384
+0.417473 0.342612 0.0824937 0.837573
+0.394172 0.369182 0.0275372 0.841175
+0.369182 0.394172 -0.0275372 0.841175
+0.342612 0.417473 -0.0824937 0.837573
+0.314574 0.438987 -0.137097 0.830384
+0.285189 0.458622 -0.191113 0.81964
+0.254583 0.476292 -0.244311 0.805385
+0.222887 0.491923 -0.296463 0.787682
+0.190237 0.505447 -0.347345 0.766606
+0.156772 0.516807 -0.39674 0.742247
+0.122635 0.525954 -0.444435 0.71471
+0.0879735 0.532848 -0.490228 0.684112
+0.0529352 0.537461 -0.533922 0.650585
+0.0176703 0.539773 -0.575329 0.614272
+-0.0176704 0.539773 -0.614272 0.575329
+-0.0529354 0.537461 -0.650585 0.533921
+-0.0879736 0.532848 -0.684112 0.490228
+-0.122635 0.525954 -0.71471 0.444435
+-0.156772 0.516807 -0.742247 0.396739
+-0.190237 0.505447 -0.766606 0.347345
+-0.222887 0.491923 -0.787682 0.296462
+-0.254583 0.476292 -0.805385 0.244311
+-0.285189 0.458622 -0.81964 0.191113
+-0.314574 0.438987 -0.830384 0.137097
+-0.342612 0.417473 -0.837573 0.0824937
+-0.369182 0.394172 -0.841175 0.0275372
+-0.394172 0.369182 -0.841175 -0.0275372
+-0.417473 0.342612 -0.837573 -0.0824938
+-0.438987 0.314574 -0.830384 -0.137097
+-0.458622 0.285189 -0.81964 -0.191113
+-0.476292 0.254583 -0.805385 -0.244311
+-0.491923 0.222887 -0.787682 -0.296463
+-0.505447 0.190237 -0.766606 -0.347345
+-0.516807 0.156772 -0.742247 -0.396739
+-0.525954 0.122635 -0.71471 -0.444435
+-0.532848 0.0879736 -0.684112 -0.490228
+-0.537461 0.0529353 -0.650585 -0.533922
+-0.539773 0.0176704 -0.614272 -0.575329
+0.539773 0.0176703 0.444435 0.71471
+0.537461 0.0529353 0.396739 0.742247
+0.532848 0.0879736 0.347345 0.766606
+0.525954 0.122635 0.296463 0.787682
+0.516807 0.156772 0.244311 0.805385
+0.505447 0.190237 0.191113 0.81964
+0.491923 0.222887 0.137097 0.830384
+0.476292 0.254583 0.0824937 0.837573
+0.458622 0.285189 0.0275372 0.841175
+0.438987 0.314574 -0.0275372 0.841175
+0.417473 0.342612 -0.0824937 0.837573
+0.394172 0.369182 -0.137097 0.830384
+0.369182 0.394172 -0.191113 0.81964
+0.342612 0.417473 -0.244311 0.805385
+0.314574 0.438987 -0.296463 0.787682
+0.285189 0.458622 -0.347345 0.766606
+0.254583 0.476292 -0.39674 0.742247
+0.222887 0.491923 -0.444435 0.71471
+0.190237 0.505447 -0.490228 0.684112
+0.156772 0.516807 -0.533922 0.650585
+0.122635 0.525954 -0.575329 0.614272
+0.0879735 0.532848 -0.614272 0.575329
+0.0529352 0.537461 -0.650585 0.533921
+0.0176703 0.539773 -0.684112 0.490228
+-0.0176704 0.539773 -0.71471 0.444435
+-0.0529354 0.537461 -0.742247 0.396739
+-0.0879736 0.532848 -0.766606 0.347345
+-0.122635 0.525954 -0.787682 0.296463
+-0.156772 0.516807 -0.805385 0.244311
+-0.190237 0.505447 -0.81964 0.191113
+-0.222887 0.491923 -0.830384 0.137097
+-0.254583 0.476292 -0.837573 0.0824938
+-0.285189 0.458622 -0.841175 0.0275372
+-0.314574 0.438987 -0.841175 -0.0275373
+-0.342612 0.417473 -0.837573 -0.0824937
+-0.369182 0.394172 -0.830384 -0.137097
+-0.394172 0.369182 -0.81964 -0.191113
+-0.417473 0.342612 -0.805385 -0.244311
+-0.438987 0.314574 -0.787682 -0.296463
+-0.458622 0.285189 -0.766606 -0.347345
+-0.476292 0.254583 -0.742247 -0.396739
+-0.491923 0.222887 -0.71471 -0.444435
+-0.505447 0.190237 -0.684112 -0.490228
+-0.516807 0.156772 -0.650585 -0.533921
+-0.525954 0.122635 -0.614272 -0.575329
+-0.532848 0.0879736 -0.575329 -0.614272
+-0.537461 0.0529353 -0.533921 -0.650585
+-0.539773 0.0176704 -0.490228 -0.684112
+0.577041 0.0188904 0.361127 0.732294
+0.57457 0.0565902 0.31246 0.754344
+0.569639 0.0940477 0.262454 0.773165
+0.562268 0.131103 0.211325 0.788675
+0.55249 0.167596 0.159291 0.800808
+0.540346 0.203372 0.106574 0.809511
+0.525887 0.238277 0.0534014 0.814748
+0.509177 0.272161 -1.72366e-08 0.816497
+0.490287 0.30488 -0.0534014 0.814748
+0.469297 0.336294 -0.106574 0.809511
+0.446298 0.366267 -0.159291 0.800808
+0.421387 0.394672 -0.211325 0.788675
+0.394672 0.421387 -0.262454 0.773165
+0.366267 0.446298 -0.31246 0.754344
+0.336294 0.469297 -0.361127 0.732294
+0.30488 0.490287 -0.408248 0.707107
+0.272161 0.509178 -0.453621 0.678892
+0.238276 0.525887 -0.497052 0.64777
+0.203372 0.540346 -0.538354 0.613875
+0.167596 0.55249 -0.57735 0.57735
+0.131103 0.562268 -0.613875 0.538354
+0.0940477 0.569639 -0.64777 0.497052
+0.0565902 0.57457 -0.678892 0.453621
+0.0188903 0.577041 -0.707107 0.408248
+-0.0188904 0.577041 -0.732294 0.361127
+-0.0565903 0.57457 -0.754345 0.31246
+-0.0940478 0.569639 -0.773165 0.262454
+-0.131103 0.562268 -0.788675 0.211325
+-0.167596 0.55249 -0.800808 0.15929
+-0.203372 0.540346 -0.809511 0.106574
+-0.238277 0.525887 -0.814748 0.0534013
+-0.272161 0.509178 -0.816497 9.30742e-08
+-0.30488 0.490287 -0.814748 -0.0534014
+-0.336294 0.469297 -0.809511 -0.106574
+-0.366267 0.446298 -0.800808 -0.159291
+-0.394672 0.421387 -0.788675 -0.211325
+-0.421387 0.394672 -0.773165 -0.262454
+-0.446298 0.366267 -0.754344 -0.31246
+-0.469297 0.336294 -0.732294 -0.361127
+-0.490287 0.30488 -0.707107 -0.408248
+-0.509177 0.272161 -0.678892 -0.453621
+-0.525887 0.238277 -0.64777 -0.497052
+-0.540346 0.203372 -0.613875 -0.538354
+-0.55249 0.167596 -0.57735 -0.57735
+-0.562268 0.131103 -0.538354 -0.613875
+-0.569639 0.0940478 -0.497052 -0.64777
+-0.57457 0.0565902 -0.453621 -0.678892
+-0.577041 0.0188904 -0.408248 -0.707107
+0.577041 0.0188904 0.497052 0.64777
+0.57457 0.0565902 0.453621 0.678892
+0.569639 0.0940477 0.408248 0.707107
+0.562268 0.131103 0.361127 0.732294
+0.55249 0.167596 0.31246 0.754344
+0.540346 0.203372 0.262454 0.773165
+0.525887 0.238277 0.211325 0.788675
+0.509177 0.272161 0.159291 0.800808
+0.490287 0.30488 0.106574 0.809511
+0.469297 0.336294 0.0534014 0.814748
+0.446298 0.366267 1.48024e-08 0.816497
+0.421387 0.394672 -0.0534015 0.814748
+0.394672 0.421387 -0.106574 0.809511
+0.366267 0.446298 -0.159291 0.800808
+0.336294 0.469297 -0.211325 0.788675
+0.30488 0.490287 -0.262454 0.773165
+0.272161 0.509178 -0.31246 0.754344
+0.238276 0.525887 -0.361127 0.732294
+0.203372 0.540346 -0.408248 0.707107
+0.167596 0.55249 -0.453621 0.678892
+0.131103 0.562268 -0.497052 0.64777
+0.0940477 0.569639 -0.538354 0.613875
+0.0565902 0.57457 -0.57735 0.57735
+0.0188903 0.577041 -0.613875 0.538354
+-0.0188904 0.577041 -0.64777 0.497052
+-0.0565903 0.57457 -0.678892 0.453621
+-0.0940478 0.569639 -0.707107 0.408248
+-0.131103 0.562268 -0.732294 0.361127
+-0.167596 0.55249 -0.754345 0.31246
+-0.203372 0.540346 -0.773165 0.262454
+-0.238277 0.525887 -0.788675 0.211325
+-0.272161 0.509178 -0.800808 0.159291
+-0.30488 0.490287 -0.809511 0.106574
+-0.336294 0.469297 -0.814748 0.0534014
+-0.366267 0.446298 -0.816497 2.77792e-08
+-0.394672 0.421387 -0.814748 -0.0534015
+-0.421387 0.394672 -0.809511 -0.106574
+-0.446298 0.366267 -0.800808 -0.159291
+-0.469297 0.336294 -0.788675 -0.211325
+-0.490287 0.30488 -0.773165 -0.262454
+-0.509177 0.272161 -0.754345 -0.31246
+-0.525887 0.238277 -0.732294 -0.361127
+-0.540346 0.203372 -0.707107 -0.408248
+-0.55249 0.167596 -0.678892 -0.453621
+-0.562268 0.131103 -0.64777 -0.497052
+-0.569639 0.0940478 -0.613875 -0.538354
+-0.57457 0.0565902 -0.57735 -0.57735
+-0.577041 0.0188904 -0.538354 -0.613875
+0.612045 0.0200363 0.417474 0.671353
+0.609424 0.060023 0.372672 0.69722
+0.604193 0.0997527 0.326274 0.720101
+0.596375 0.139055 0.278478 0.739899
+0.586004 0.177762 0.22949 0.756528
+0.573123 0.215708 0.17952 0.769917
+0.557788 0.25273 0.12878 0.78001
+0.540064 0.28867 0.0774893 0.786763
+0.520028 0.323374 0.0258667 0.790146
+0.497765 0.356693 -0.0258667 0.790146
+0.47337 0.388485 -0.0774893 0.786763
+0.446949 0.418613 -0.12878 0.78001
+0.418613 0.446949 -0.17952 0.769917
+0.388485 0.47337 -0.22949 0.756528
+0.356693 0.497765 -0.278478 0.739899
+0.323374 0.520028 -0.326274 0.720101
+0.28867 0.540064 -0.372672 0.69722
+0.25273 0.557788 -0.417474 0.671353
+0.215708 0.573123 -0.460489 0.642612
+0.177762 0.586004 -0.501532 0.611118
+0.139055 0.596375 -0.540427 0.577008
+0.0997526 0.604193 -0.577008 0.540427
+0.0600229 0.609424 -0.611118 0.501532
+0.0200362 0.612045 -0.642612 0.460489
+-0.0200363 0.612045 -0.671353 0.417474
+-0.060023 0.609424 -0.69722 0.372672
+-0.0997527 0.604193 -0.720101 0.326274
+-0.139055 0.596375 -0.739899 0.278478
+-0.177762 0.586004 -0.756528 0.22949
+-0.215708 0.573123 -0.769917 0.179519
+-0.25273 0.557788 -0.78001 0.12878
+-0.28867 0.540064 -0.786763 0.0774894
+-0.323374 0.520028 -0.790146 0.0258667
+-0.356693 0.497765 -0.790146 -0.0258668
+-0.388485 0.47337 -0.786763 -0.0774893
+-0.418613 0.446949 -0.78001 -0.12878
+-0.446949 0.418613 -0.769917 -0.17952
+-0.47337 0.388485 -0.756528 -0.22949
+-0.497765 0.356693 -0.739899 -0.278478
+-0.520028 0.323374 -0.720101 -0.326274
+-0.540064 0.28867 -0.69722 -0.372672
+-0.557788 0.25273 -0.671353 -0.417474
+-0.573123 0.215708 -0.642612 -0.460489
+-0.586004 0.177762 -0.611118 -0.501532
+-0.596375 0.139055 -0.577008 -0.540427
+-0.604193 0.0997527 -0.540427 -0.577008
+-0.609424 0.060023 -0.501532 -0.611118
+-0.612045 0.0200363 -0.460489 -0.642612
+0.539773 0.0176703 0.684112 0.490228
+0.537461 0.0529353 0.650585 0.533922
+0.532848 0.0879736 0.614272 0.575329
+0.525954 0.122635 0.575329 0.614272
+0.516807 0.156772 0.533922 0.650585
+0.505447 0.190237 0.490228 0.684112
+0.491923 0.222887 0.444435 0.71471
+0.476292 0.254583 0.396739 0.742247
+0.458622 0.285189 0.347345 0.766606
+0.438987 0.314574 0.296463 0.787682
+0.417473 0.342612 0.244311 0.805385
+0.394172 0.369182 0.191113 0.81964
+0.369182 0.394172 0.137097 0.830384
+0.342612 0.417473 0.0824937 0.837573
+0.314574 0.438987 0.0275372 0.841175
+0.285189 0.458622 -0.0275373 0.841175
+0.254583 0.476292 -0.0824938 0.837573
+0.222887 0.491923 -0.137097 0.830384
+0.190237 0.505447 -0.191113 0.81964
+0.156772 0.516807 -0.244311 0.805385
+0.122635 0.525954 -0.296463 0.787682
+0.0879735 0.532848 -0.347345 0.766606
+0.0529352 0.537461 -0.39674 0.742247
+0.0176703 0.539773 -0.444435 0.71471
+-0.0176704 0.539773 -0.490228 0.684112
+-0.0529354 0.537461 -0.533922 0.650585
+-0.0879736 0.532848 -0.575329 0.614272
+-0.122635 0.525954 -0.614272 0.575329
+-0.156772 0.516807 -0.650585 0.533921
+-0.190237 0.505447 -0.684112 0.490228
+-0.222887 0.491923 -0.71471 0.444435
+-0.254583 0.476292 -0.742247 0.39674
+-0.285189 0.458622 -0.766606 0.347345
+-0.314574 0.438987 -0.787682 0.296463
+-0.342612 0.417473 -0.805385 0.244311
+-0.369182 0.394172 -0.81964 0.191113
+-0.394172 0.369182 -0.830384 0.137097
+-0.417473 0.342612 -0.837573 0.0824937
+-0.438987 0.314574 -0.841175 0.0275373
+-0.458622 0.285189 -0.841175 -0.0275372
+-0.476292 0.254583 -0.837573 -0.0824936
+-0.491923 0.222887 -0.830384 -0.137097
+-0.505447 0.190237 -0.81964 -0.191113
+-0.516807 0.156772 -0.805385 -0.244311
+-0.525954 0.122635 -0.787682 -0.296463
+-0.532848 0.0879736 -0.766606 -0.347345
+-0.537461 0.0529353 -0.742247 -0.39674
+-0.539773 0.0176704 -0.71471 -0.444435
+0.577041 0.0188904 0.613875 0.538354
+0.57457 0.0565902 0.57735 0.57735
+0.569639 0.0940477 0.538354 0.613875
+0.562268 0.131103 0.497052 0.64777
+0.55249 0.167596 0.453621 0.678892
+0.540346 0.203372 0.408248 0.707107
+0.525887 0.238277 0.361127 0.732294
+0.509177 0.272161 0.31246 0.754344
+0.490287 0.30488 0.262454 0.773165
+0.469297 0.336294 0.211325 0.788675
+0.446298 0.366267 0.159291 0.800808
+0.421387 0.394672 0.106574 0.809511
+0.394672 0.421387 0.0534014 0.814748
+0.366267 0.446298 -1.82562e-09 0.816497
+0.336294 0.469297 -0.0534015 0.814748
+0.30488 0.490287 -0.106574 0.809511
+0.272161 0.509178 -0.159291 0.800808
+0.238276 0.525887 -0.211325 0.788675
+0.203372 0.540346 -0.262454 0.773165
+0.167596 0.55249 -0.31246 0.754344
+0.131103 0.562268 -0.361127 0.732294
+0.0940477 0.569639 -0.408248 0.707107
+0.0565902 0.57457 -0.453621 0.678892
+0.0188903 0.577041 -0.497052 0.64777
+-0.0188904 0.577041 -0.538354 0.613875
+-0.0565903 0.57457 -0.57735 0.57735
+-0.0940478 0.569639 -0.613875 0.538354
+-0.131103 0.562268 -0.64777 0.497052
+-0.167596 0.55249 -0.678892 0.453621
+-0.203372 0.540346 -0.707107 0.408248
+-0.238277 0.525887 -0.732294 0.361127
+-0.272161 0.509178 -0.754344 0.31246
+-0.30488 0.490287 -0.773165 0.262454
+-0.336294 0.469297 -0.788675 0.211325
+-0.366267 0.446298 -0.800808 0.159291
+-0.394672 0.421387 -0.809511 0.106574
+-0.421387 0.394672 -0.814748 0.0534015
+-0.446298 0.366267 -0.816497 -3.75158e-08
+-0.469297 0.336294 -0.814748 -0.0534014
+-0.490287 0.30488 -0.809511 -0.106574
+-0.509177 0.272161 -0.800808 -0.15929
+-0.525887 0.238277 -0.788675 -0.211325
+-0.540346 0.203372 -0.773165 -0.262454
+-0.55249 0.167596 -0.754344 -0.31246
+-0.562268 0.131103 -0.732294 -0.361127
+-0.569639 0.0940478 -0.707107 -0.408248
+-0.57457 0.0565902 -0.678892 -0.453621
+-0.577041 0.0188904 -0.64777 -0.497052
+0.577041 0.0188904 0.707107 0.408248
+0.57457 0.0565902 0.678892 0.453621
+0.569639 0.0940477 0.64777 0.497052
+0.562268 0.131103 0.613875 0.538354
+0.55249 0.167596 0.57735 0.57735
+0.540346 0.203372 0.538354 0.613875
+0.525887 0.238277 0.497052 0.64777
+0.509177 0.272161 0.453621 0.678892
+0.490287 0.30488 0.408248 0.707107
+0.469297 0.336294 0.361127 0.732294
+0.446298 0.366267 0.31246 0.754344
+0.421387 0.394672 0.262454 0.773165
+0.394672 0.421387 0.211325 0.788675
+0.366267 0.446298 0.159291 0.800808
+0.336294 0.469297 0.106574 0.809511
+0.30488 0.490287 0.0534014 0.814748
+0.272161 0.509178 -6.71206e-08 0.816497
+0.238276 0.525887 -0.0534015 0.814748
+0.203372 0.540346 -0.106574 0.809511
+0.167596 0.55249 -0.159291 0.800808
+0.131103 0.562268 -0.211325 0.788675
+0.0940477 0.569639 -0.262454 0.773165
+0.0565902 0.57457 -0.31246 0.754344
+0.0188903 0.577041 -0.361127 0.732293
+-0.0188904 0.577041 -0.408248 0.707107
+-0.0565903 0.57457 -0.453621 0.678892
+-0.0940478 0.569639 -0.497052 0.64777
+-0.131103 0.562268 -0.538354 0.613875
+-0.167596 0.55249 -0.57735 0.57735
+-0.203372 0.540346 -0.613875 0.538354
+-0.238277 0.525887 -0.64777 0.497052
+-0.272161 0.509178 -0.678892 0.453621
+-0.30488 0.490287 -0.707107 0.408248
+-0.336294 0.469297 -0.732294 0.361127
+-0.366267 0.446298 -0.754344 0.31246
+-0.394672 0.421387 -0.773165 0.262454
+-0.421387 0.394672 -0.788675 0.211325
+-0.446298 0.366267 -0.800808 0.159291
+-0.469297 0.336294 -0.809511 0.106574
+-0.490287 0.30488 -0.814748 0.0534014
+-0.509177 0.272161 -0.816497 9.18571e-08
+-0.525887 0.238277 -0.814748 -0.0534014
+-0.540346 0.203372 -0.809511 -0.106574
+-0.55249 0.167596 -0.800808 -0.159291
+-0.562268 0.131103 -0.788675 -0.211325
+-0.569639 0.0940478 -0.773165 -0.262454
+-0.57457 0.0565902 -0.754344 -0.31246
+-0.577041 0.0188904 -0.732294 -0.361127
+0.612045 0.0200363 0.642612 0.460489
+0.609424 0.060023 0.611118 0.501532
+0.604193 0.0997527 0.577008 0.540427
+0.596375 0.139055 0.540427 0.577008
+0.586004 0.177762 0.501532 0.611118
+0.573123 0.215708 0.460489 0.642612
+0.557788 0.25273 0.417474 0.671353
+0.540064 0.28867 0.372672 0.69722
+0.520028 0.323374 0.326274 0.720101
+0.497765 0.356693 0.278478 0.739899
+0.47337 0.388485 0.22949 0.756528
+0.446949 0.418613 0.17952 0.769917
+0.418613 0.446949 0.12878 0.78001
+0.388485 0.47337 0.0774894 0.786763
+0.356693 0.497765 0.0258667 0.790146
+0.323374 0.520028 -0.0258668 0.790146
+0.28867 0.540064 -0.0774894 0.786763
+0.25273 0.557788 -0.12878 0.78001
+0.215708 0.573123 -0.17952 0.769917
+0.177762 0.586004 -0.22949 0.756528
+0.139055 0.596375 -0.278478 0.739899
+0.0997526 0.604193 -0.326274 0.720101
+0.0600229 0.609424 -0.372672 0.69722
+0.0200362 0.612045 -0.417474 0.671353
+-0.0200363 0.612045 -0.460489 0.642612
+-0.060023 0.609424 -0.501532 0.611118
+-0.0997527 0.604193 -0.540427 0.577008
+-0.139055 0.596375 -0.577008 0.540427
+-0.177762 0.586004 -0.611119 0.501532
+-0.215708 0.573123 -0.642612 0.460489
+-0.25273 0.557788 -0.671353 0.417474
+-0.28867 0.540064 -0.69722 0.372672
+-0.323374 0.520028 -0.720101 0.326274
+-0.356693 0.497765 -0.739899 0.278478
+-0.388485 0.47337 -0.756528 0.22949
+-0.418613 0.446949 -0.769917 0.179519
+-0.446949 0.418613 -0.78001 0.12878
+-0.47337 0.388485 -0.786763 0.0774893
+-0.497765 0.356693 -0.790146 0.0258668
+-0.520028 0.323374 -0.790146 -0.0258667
+-0.540064 0.28867 -0.786763 -0.0774893
+-0.557788 0.25273 -0.78001 -0.12878
+-0.573123 0.215708 -0.769917 -0.17952
+-0.586004 0.177762 -0.756528 -0.22949
+-0.596375 0.139055 -0.739899 -0.278478
+-0.604193 0.0997527 -0.720101 -0.326274
+-0.609424 0.060023 -0.69722 -0.372672
+-0.612045 0.0200363 -0.671353 -0.417474
+0.612045 0.0200363 0.540427 0.577008
+0.609424 0.060023 0.501532 0.611118
+0.604193 0.0997527 0.460489 0.642612
+0.596375 0.139055 0.417474 0.671353
+0.586004 0.177762 0.372672 0.69722
+0.573123 0.215708 0.326274 0.720101
+0.557788 0.25273 0.278478 0.739899
+0.540064 0.28867 0.22949 0.756528
+0.520028 0.323374 0.17952 0.769917
+0.497765 0.356693 0.12878 0.78001
+0.47337 0.388485 0.0774894 0.786763
+0.446949 0.418613 0.0258667 0.790146
+0.418613 0.446949 -0.0258667 0.790146
+0.388485 0.47337 -0.0774894 0.786763
+0.356693 0.497765 -0.12878 0.78001
+0.323374 0.520028 -0.17952 0.769917
+0.28867 0.540064 -0.22949 0.756528
+0.25273 0.557788 -0.278478 0.739899
+0.215708 0.573123 -0.326274 0.720101
+0.177762 0.586004 -0.372672 0.69722
+0.139055 0.596375 -0.417474 0.671353
+0.0997526 0.604193 -0.460489 0.642612
+0.0600229 0.609424 -0.501532 0.611118
+0.0200362 0.612045 -0.540427 0.577008
+-0.0200363 0.612045 -0.577008 0.540427
+-0.060023 0.609424 -0.611118 0.501532
+-0.0997527 0.604193 -0.642612 0.460489
+-0.139055 0.596375 -0.671353 0.417474
+-0.177762 0.586004 -0.69722 0.372672
+-0.215708 0.573123 -0.720101 0.326273
+-0.25273 0.557788 -0.739899 0.278478
+-0.28867 0.540064 -0.756528 0.22949
+-0.323374 0.520028 -0.769917 0.17952
+-0.356693 0.497765 -0.78001 0.12878
+-0.388485 0.47337 -0.786763 0.0774894
+-0.418613 0.446949 -0.790146 0.0258666
+-0.446949 0.418613 -0.790146 -0.0258667
+-0.47337 0.388485 -0.786763 -0.0774894
+-0.497765 0.356693 -0.78001 -0.12878
+-0.520028 0.323374 -0.769917 -0.17952
+-0.540064 0.28867 -0.756528 -0.22949
+-0.557788 0.25273 -0.739899 -0.278478
+-0.573123 0.215708 -0.720101 -0.326274
+-0.586004 0.177762 -0.69722 -0.372672
+-0.596375 0.139055 -0.671353 -0.417474
+-0.604193 0.0997527 -0.642612 -0.460489
+-0.609424 0.060023 -0.611118 -0.501532
+-0.612045 0.0200363 -0.577008 -0.540427
+0.645152 0.0211201 0.464949 0.605934
+0.642389 0.0632698 0.424324 0.635045
+0.636876 0.105149 0.381881 0.661438
+0.628635 0.146577 0.337804 0.684998
+0.617702 0.187378 0.292279 0.705625
+0.604125 0.227376 0.245503 0.72323
+0.58796 0.266401 0.197676 0.737738
+0.569278 0.304285 0.149003 0.749087
+0.548158 0.340866 0.099691 0.757229
+0.52469 0.375988 0.0499525 0.762127
+0.498976 0.409499 1.38464e-08 0.763763
+0.471125 0.441257 -0.0499525 0.762127
+0.441257 0.471125 -0.099691 0.757229
+0.409499 0.498976 -0.149003 0.749087
+0.375988 0.52469 -0.197676 0.737738
+0.340866 0.548158 -0.245504 0.72323
+0.304285 0.569278 -0.292279 0.705625
+0.266401 0.58796 -0.337804 0.684998
+0.227376 0.604125 -0.381881 0.661438
+0.187378 0.617702 -0.424324 0.635045
+0.146577 0.628635 -0.464949 0.605934
+0.105148 0.636876 -0.503584 0.574227
+0.0632697 0.642389 -0.540062 0.540062
+0.02112 0.645152 -0.574227 0.503584
+-0.0211201 0.645152 -0.605934 0.464949
+-0.0632698 0.642389 -0.635045 0.424324
+-0.105149 0.636876 -0.661438 0.381881
+-0.146577 0.628635 -0.684998 0.337804
+-0.187378 0.617702 -0.705625 0.292279
+-0.227377 0.604125 -0.72323 0.245503
+-0.266401 0.58796 -0.737738 0.197676
+-0.304285 0.569278 -0.749087 0.149003
+-0.340866 0.548158 -0.757229 0.099691
+-0.375988 0.52469 -0.762127 0.0499524
+-0.409499 0.498976 -0.763763 2.59851e-08
+-0.441257 0.471125 -0.762127 -0.0499525
+-0.471125 0.441257 -0.757229 -0.099691
+-0.498976 0.409499 -0.749087 -0.149003
+-0.52469 0.375988 -0.737738 -0.197676
+-0.548158 0.340866 -0.72323 -0.245503
+-0.569278 0.304285 -0.705625 -0.292279
+-0.58796 0.266401 -0.684998 -0.337804
+-0.604125 0.227376 -0.661438 -0.381881
+-0.617702 0.187378 -0.635045 -0.424324
+-0.628635 0.146577 -0.605934 -0.464949
+-0.636876 0.105149 -0.574227 -0.503584
+-0.642389 0.0632698 -0.540062 -0.540062
+-0.645152 0.0211201 -0.503584 -0.574227
+0.645152 0.0211201 0.574227 0.503584
+0.642389 0.0632698 0.540062 0.540062
+0.636876 0.105149 0.503584 0.574227
+0.628635 0.146577 0.464949 0.605934
+0.617702 0.187378 0.424324 0.635045
+0.604125 0.227376 0.381881 0.661438
+0.58796 0.266401 0.337804 0.684998
+0.569278 0.304285 0.292279 0.705625
+0.548158 0.340866 0.245503 0.72323
+0.52469 0.375988 0.197676 0.737738
+0.498976 0.409499 0.149003 0.749087
+0.471125 0.441257 0.099691 0.757229
+0.441257 0.471125 0.0499525 0.762127
+0.409499 0.498976 -1.70771e-09 0.763763
+0.375988 0.52469 -0.0499525 0.762127
+0.340866 0.548158 -0.0996911 0.757229
+0.304285 0.569278 -0.149003 0.749087
+0.266401 0.58796 -0.197676 0.737738
+0.227376 0.604125 -0.245503 0.72323
+0.187378 0.617702 -0.292279 0.705625
+0.146577 0.628635 -0.337804 0.684998
+0.105148 0.636876 -0.381881 0.661438
+0.0632697 0.642389 -0.424324 0.635045
+0.02112 0.645152 -0.464949 0.605934
+-0.0211201 0.645152 -0.503584 0.574227
+-0.0632698 0.642389 -0.540062 0.540062
+-0.105149 0.636876 -0.574227 0.503584
+-0.146577 0.628635 -0.605934 0.464949
+-0.187378 0.617702 -0.635045 0.424324
+-0.227377 0.604125 -0.661438 0.381881
+-0.266401 0.58796 -0.684998 0.337803
+-0.304285 0.569278 -0.705625 0.292279
+-0.340866 0.548158 -0.72323 0.245503
+-0.375988 0.52469 -0.737738 0.197676
+-0.409499 0.498976 -0.749087 0.149003
+-0.441257 0.471125 -0.757229 0.099691
+-0.471125 0.441257 -0.762127 0.0499525
+-0.498976 0.409499 -0.763763 -3.50928e-08
+-0.52469 0.375988 -0.762127 -0.0499524
+-0.548158 0.340866 -0.757229 -0.099691
+-0.569278 0.304285 -0.749087 -0.149003
+-0.58796 0.266401 -0.737738 -0.197676
+-0.604125 0.227376 -0.72323 -0.245504
+-0.617702 0.187378 -0.705625 -0.292279
+-0.628635 0.146577 -0.684998 -0.337804
+-0.636876 0.105149 -0.661438 -0.381881
+-0.642389 0.0632698 -0.635045 -0.424324
+-0.645152 0.0211201 -0.605934 -0.464949
+0.676641 0.0221509 0.50311 0.537165
+0.673743 0.0663579 0.466901 0.56892
+0.667961 0.110281 0.428692 0.598239
+0.659318 0.153731 0.388647 0.624996
+0.647852 0.196524 0.346939 0.649076
+0.633611 0.238474 0.303744 0.670378
+0.616658 0.279404 0.259249 0.688808
+0.597064 0.319137 0.213644 0.704289
+0.574913 0.357504 0.167124 0.716754
+0.5503 0.394339 0.119888 0.72615
+0.523331 0.429486 0.0721387 0.732436
+0.49412 0.462794 0.0240806 0.735586
+0.462794 0.49412 -0.0240806 0.735586
+0.429486 0.523331 -0.0721387 0.732436
+0.394339 0.5503 -0.119888 0.72615
+0.357504 0.574913 -0.167124 0.716754
+0.319137 0.597064 -0.213644 0.704289
+0.279404 0.616658 -0.259249 0.688808
+0.238474 0.633611 -0.303744 0.670378
+0.196524 0.647852 -0.346939 0.649076
+0.153731 0.659318 -0.388647 0.624996
+0.110281 0.667961 -0.428692 0.598239
+0.0663578 0.673743 -0.466901 0.56892
+0.0221508 0.676641 -0.50311 0.537165
+-0.022151 0.676641 -0.537165 0.50311
+-0.066358 0.673743 -0.56892 0.466901
+-0.110281 0.667961 -0.598239 0.428692
+-0.153731 0.659318 -0.624996 0.388647
+-0.196524 0.647852 -0.649077 0.346938
+-0.238475 0.633611 -0.670378 0.303744
+-0.279404 0.616658 -0.688808 0.259249
+-0.319137 0.597064 -0.704289 0.213644
+-0.357504 0.574913 -0.716754 0.167124
+-0.394339 0.5503 -0.72615 0.119888
+-0.429486 0.523331 -0.732436 0.0721387
+-0.462794 0.49412 -0.735586 0.0240805
+-0.49412 0.462794 -0.735586 -0.0240805
+-0.523331 0.429486 -0.732436 -0.0721387
+-0.5503 0.394339 -0.72615 -0.119888
+-0.574913 0.357504 -0.716754 -0.167124
+-0.597063 0.319137 -0.704289 -0.213644
+-0.616658 0.279404 -0.688808 -0.259249
+-0.633611 0.238474 -0.670378 -0.303744
+-0.647852 0.196524 -0.649076 -0.346939
+-0.659318 0.153731 -0.624996 -0.388647
+-0.667961 0.110281 -0.598239 -0.428692
+-0.673743 0.0663579 -0.56892 -0.466901
+-0.676641 0.022151 -0.537165 -0.50311
+0.0510037 0.00166969 -0.728913 0.682702
+0.0507853 0.00500192 -0.772003 0.633567
+0.0503494 0.00831272 -0.811788 0.581719
+0.049698 0.0115879 -0.848096 0.52738
+0.0488337 0.0148135 -0.880772 0.470783
+0.0477602 0.0179757 -0.909677 0.412169
+0.0464823 0.0210609 -0.934687 0.351791
+0.0450054 0.0240559 -0.955694 0.289906
+0.0433357 0.0269479 -0.972608 0.22678
+0.0414804 0.0297244 -0.985358 0.162683
+0.0394475 0.0323737 -0.993888 0.0978894
+0.0372457 0.0348844 -0.998162 0.0326764
+0.0348844 0.0372457 -0.998162 -0.0326765
+0.0323737 0.0394475 -0.993888 -0.0978894
+0.0297244 0.0414804 -0.985358 -0.162683
+0.0269478 0.0433357 -0.972608 -0.22678
+0.0240559 0.0450054 -0.955693 -0.289907
+0.0210609 0.0464823 -0.934686 -0.351791
+0.0179757 0.0477603 -0.909677 -0.412169
+0.0148135 0.0488337 -0.880772 -0.470783
+0.0115879 0.049698 -0.848096 -0.52738
+0.00831272 0.0503494 -0.811788 -0.581719
+0.00500191 0.0507853 -0.772003 -0.633567
+0.00166968 0.0510037 -0.728913 -0.682702
+-0.00166969 0.0510037 -0.682702 -0.728913
+-0.00500192 0.0507853 -0.633567 -0.772003
+-0.00831273 0.0503494 -0.581719 -0.811788
+-0.0115879 0.049698 -0.52738 -0.848096
+-0.0148135 0.0488337 -0.470782 -0.880772
+-0.0179757 0.0477602 -0.412169 -0.909677
+-0.0210609 0.0464823 -0.351791 -0.934687
+-0.0240559 0.0450054 -0.289907 -0.955693
+-0.0269478 0.0433357 -0.22678 -0.972608
+-0.0297244 0.0414804 -0.162683 -0.985358
+-0.0323737 0.0394475 -0.0978895 -0.993888
+-0.0348844 0.0372457 -0.0326764 -0.998162
+-0.0372457 0.0348844 0.0326764 -0.998162
+-0.0394475 0.0323737 0.0978895 -0.993888
+-0.0414804 0.0297244 0.162683 -0.985358
+-0.0433357 0.0269478 0.22678 -0.972608
+-0.0450054 0.0240559 0.289906 -0.955694
+-0.0464823 0.0210609 0.351791 -0.934687
+-0.0477603 0.0179757 0.412169 -0.909677
+-0.0488337 0.0148135 0.470783 -0.880772
+-0.049698 0.0115879 0.52738 -0.848096
+-0.0503494 0.00831273 0.581719 -0.811788
+-0.0507853 0.00500191 0.633567 -0.772003
+-0.0510037 0.00166969 0.682702 -0.728913
+0.102007 0.00333938 -0.931019 0.350411
+0.101571 0.0100038 -0.951943 0.288769
+0.100699 0.0166254 -0.968791 0.22589
+0.0993959 0.0231759 -0.981491 0.162045
+0.0976673 0.0296271 -0.989988 0.0975053
+0.0955205 0.0359514 -0.994245 0.0325482
+0.0929646 0.0421217 -0.994245 -0.0325482
+0.0900107 0.0481117 -0.989988 -0.0975053
+0.0866713 0.0538957 -0.981491 -0.162045
+0.0829608 0.0594489 -0.968791 -0.22589
+0.0788951 0.0647475 -0.951943 -0.288769
+0.0744914 0.0697688 -0.931019 -0.350411
+0.0697688 0.0744914 -0.906107 -0.410552
+0.0647475 0.078895 -0.877316 -0.468935
+0.0594489 0.0829608 -0.844768 -0.52531
+0.0538957 0.0866713 -0.808602 -0.579436
+0.0481117 0.0900107 -0.768974 -0.631081
+0.0421217 0.0929647 -0.726053 -0.680023
+0.0359514 0.0955205 -0.680023 -0.726053
+0.0296271 0.0976673 -0.63108 -0.768974
+0.0231759 0.0993959 -0.579436 -0.808602
+0.0166254 0.100699 -0.52531 -0.844768
+0.0100038 0.101571 -0.468935 -0.877316
+0.00333937 0.102007 -0.410552 -0.906107
+-0.00333939 0.102007 -0.350411 -0.931019
+-0.0100038 0.101571 -0.288769 -0.951943
+-0.0166255 0.100699 -0.22589 -0.968791
+-0.0231759 0.0993959 -0.162045 -0.981491
+-0.0296271 0.0976673 -0.0975051 -0.989988
+-0.0359514 0.0955205 -0.0325481 -0.994245
+-0.0421217 0.0929646 0.0325484 -0.994245
+-0.0481117 0.0900107 0.0975052 -0.989988
+-0.0538957 0.0866713 0.162045 -0.981491
+-0.0594489 0.0829608 0.225891 -0.968791
+-0.0647475 0.0788951 0.288769 -0.951943
+-0.0697689 0.0744914 0.350411 -0.931019
+-0.0744914 0.0697689 0.410552 -0.906107
+-0.0788951 0.0647475 0.468935 -0.877316
+-0.0829608 0.0594489 0.52531 -0.844768
+-0.0866713 0.0538957 0.579436 -0.808602
+-0.0900107 0.0481117 0.63108 -0.768974
+-0.0929646 0.0421217 0.680023 -0.726053
+-0.0955205 0.0359514 0.726053 -0.680023
+-0.0976673 0.0296271 0.768974 -0.631081
+-0.0993959 0.0231759 0.808602 -0.579436
+-0.100699 0.0166255 0.844768 -0.52531
+-0.101571 0.0100038 0.877316 -0.468935
+-0.102007 0.00333939 0.906107 -0.410552
+0.102007 0.00333938 -0.410552 0.906107
+0.101571 0.0100038 -0.468935 0.877316
+0.100699 0.0166254 -0.52531 0.844768
+0.0993959 0.0231759 -0.579436 0.808602
+0.0976673 0.0296271 -0.631081 0.768974
+0.0955205 0.0359514 -0.680023 0.726053
+0.0929646 0.0421217 -0.726053 0.680023
+0.0900107 0.0481117 -0.768974 0.63108
+0.0866713 0.0538957 -0.808602 0.579436
+0.0829608 0.0594489 -0.844768 0.52531
+0.0788951 0.0647475 -0.877316 0.468935
+0.0744914 0.0697688 -0.906107 0.410552
+0.0697688 0.0744914 -0.931019 0.350411
+0.0647475 0.078895 -0.951943 0.288769
+0.0594489 0.0829608 -0.968791 0.22589
+0.0538957 0.0866713 -0.981491 0.162045
+0.0481117 0.0900107 -0.989988 0.0975052
+0.0421217 0.0929647 -0.994245 0.0325482
+0.0359514 0.0955205 -0.994245 -0.0325483
+0.0296271 0.0976673 -0.989988 -0.0975053
+0.0231759 0.0993959 -0.981491 -0.162045
+0.0166254 0.100699 -0.968791 -0.225891
+0.0100038 0.101571 -0.951943 -0.288769
+0.00333937 0.102007 -0.931019 -0.350411
+-0.00333939 0.102007 -0.906107 -0.410552
+-0.0100038 0.101571 -0.877316 -0.468935
+-0.0166255 0.100699 -0.844768 -0.52531
+-0.0231759 0.0993959 -0.808602 -0.579436
+-0.0296271 0.0976673 -0.768974 -0.631081
+-0.0359514 0.0955205 -0.726053 -0.680023
+-0.0421217 0.0929646 -0.680023 -0.726053
+-0.0481117 0.0900107 -0.631081 -0.768974
+-0.0538957 0.0866713 -0.579436 -0.808602
+-0.0594489 0.0829608 -0.52531 -0.844768
+-0.0647475 0.0788951 -0.468935 -0.877316
+-0.0697689 0.0744914 -0.410552 -0.906107
+-0.0744914 0.0697689 -0.350411 -0.931019
+-0.0788951 0.0647475 -0.288769 -0.951943
+-0.0829608 0.0594489 -0.225891 -0.968791
+-0.0866713 0.0538957 -0.162045 -0.981491
+-0.0900107 0.0481117 -0.0975054 -0.989988
+-0.0929646 0.0421217 -0.0325482 -0.994245
+-0.0955205 0.0359514 0.0325483 -0.994245
+-0.0976673 0.0296271 0.0975053 -0.989988
+-0.0993959 0.0231759 0.162045 -0.981491
+-0.100699 0.0166255 0.22589 -0.968791
+-0.101571 0.0100038 0.288769 -0.951943
+-0.102007 0.00333939 0.350411 -0.931019
+0.153011 0.00500907 -0.72126 0.675534
+0.152356 0.0150057 -0.763898 0.626915
+0.151048 0.0249382 -0.803265 0.575611
+0.149094 0.0347638 -0.839192 0.521843
+0.146501 0.0444406 -0.871525 0.46584
+0.143281 0.0539271 -0.900126 0.407842
+0.139447 0.0631826 -0.924873 0.348098
+0.135016 0.0721676 -0.94566 0.286863
+0.130007 0.0808436 -0.962397 0.224399
+0.124441 0.0891733 -0.975013 0.160975
+0.118343 0.0971212 -0.983453 0.0968617
+0.111737 0.104653 -0.987683 0.0323334
+0.104653 0.111737 -0.987683 -0.0323334
+0.0971212 0.118343 -0.983453 -0.0968617
+0.0891733 0.124441 -0.975013 -0.160975
+0.0808435 0.130007 -0.962397 -0.2244
+0.0721676 0.135016 -0.94566 -0.286863
+0.0631826 0.139447 -0.924873 -0.348098
+0.053927 0.143281 -0.900126 -0.407842
+0.0444406 0.146501 -0.871525 -0.46584
+0.0347638 0.149094 -0.839192 -0.521843
+0.0249382 0.151048 -0.803265 -0.575611
+0.0150057 0.152356 -0.763898 -0.626915
+0.00500905 0.153011 -0.72126 -0.675534
+-0.00500908 0.153011 -0.675534 -0.72126
+-0.0150058 0.152356 -0.626915 -0.763898
+-0.0249382 0.151048 -0.575611 -0.803265
+-0.0347638 0.149094 -0.521843 -0.839192
+-0.0444406 0.146501 -0.46584 -0.871525
+-0.0539271 0.143281 -0.407842 -0.900126
+-0.0631826 0.139447 -0.348098 -0.924873
+-0.0721676 0.135016 -0.286863 -0.94566
+-0.0808435 0.130007 -0.224399 -0.962397
+-0.0891733 0.124441 -0.160975 -0.975013
+-0.0971212 0.118343 -0.0968617 -0.983453
+-0.104653 0.111737 -0.0323333 -0.987683
+-0.111737 0.104653 0.0323333 -0.987683
+-0.118343 0.0971212 0.0968617 -0.983453
+-0.124441 0.0891733 0.160975 -0.975013
+-0.130007 0.0808435 0.224399 -0.962397
+-0.135016 0.0721676 0.286863 -0.94566
+-0.139447 0.0631826 0.348098 -0.924873
+-0.143281 0.053927 0.407842 -0.900126
+-0.146501 0.0444406 0.46584 -0.871525
+-0.149094 0.0347638 0.521843 -0.839192
+-0.151048 0.0249382 0.575611 -0.803265
+-0.152356 0.0150057 0.626915 -0.763898
+-0.153011 0.00500908 0.675534 -0.72126
+0.153011 0.00500907 -0.962397 0.224399
+0.152356 0.0150057 -0.975013 0.160975
+0.151048 0.0249382 -0.983453 0.0968617
+0.149094 0.0347638 -0.987683 0.0323334
+0.146501 0.0444406 -0.987683 -0.0323334
+0.143281 0.0539271 -0.983453 -0.0968617
+0.139447 0.0631826 -0.975013 -0.160975
+0.135016 0.0721676 -0.962397 -0.224399
+0.130007 0.0808436 -0.94566 -0.286863
+0.124441 0.0891733 -0.924873 -0.348098
+0.118343 0.0971212 -0.900126 -0.407842
+0.111737 0.104653 -0.871525 -0.46584
+0.104653 0.111737 -0.839192 -0.521843
+0.0971212 0.118343 -0.803265 -0.575611
+0.0891733 0.124441 -0.763898 -0.626915
+0.0808435 0.130007 -0.72126 -0.675534
+0.0721676 0.135016 -0.675534 -0.72126
+0.0631826 0.139447 -0.626915 -0.763898
+0.053927 0.143281 -0.575611 -0.803265
+0.0444406 0.146501 -0.521843 -0.839192
+0.0347638 0.149094 -0.46584 -0.871525
+0.0249382 0.151048 -0.407842 -0.900126
+0.0150057 0.152356 -0.348098 -0.924873
+0.00500905 0.153011 -0.286863 -0.94566
+-0.00500908 0.153011 -0.224399 -0.962397
+-0.0150058 0.152356 -0.160975 -0.975013
+-0.0249382 0.151048 -0.0968616 -0.983453
+-0.0347638 0.149094 -0.0323333 -0.987683
+-0.0444406 0.146501 0.0323335 -0.987683
+-0.0539271 0.143281 0.0968618 -0.983453
+-0.0631826 0.139447 0.160975 -0.975013
+-0.0721676 0.135016 0.224399 -0.962397
+-0.0808435 0.130007 0.286863 -0.94566
+-0.0891733 0.124441 0.348098 -0.924873
+-0.0971212 0.118343 0.407842 -0.900126
+-0.104653 0.111737 0.46584 -0.871525
+-0.111737 0.104653 0.521843 -0.839192
+-0.118343 0.0971212 0.575611 -0.803265
+-0.124441 0.0891733 0.626915 -0.763898
+-0.130007 0.0808435 0.675534 -0.72126
+-0.135016 0.0721676 0.72126 -0.675534
+-0.139447 0.0631826 0.763898 -0.626915
+-0.143281 0.053927 0.803265 -0.575611
+-0.146501 0.0444406 0.839192 -0.521843
+-0.149094 0.0347638 0.871525 -0.46584
+-0.151048 0.0249382 0.900126 -0.407842
+-0.152356 0.0150057 0.924873 -0.348098
+-0.153011 0.00500908 0.94566 -0.286863
+0.204015 0.00667875 -0.96587 0.159466
+0.203141 0.0200077 -0.974231 0.0959534
+0.201398 0.0332509 -0.978421 0.0320302
+0.198792 0.0463518 -0.978421 -0.0320302
+0.195335 0.0592541 -0.974231 -0.0959534
+0.191041 0.0719027 -0.96587 -0.159466
+0.185929 0.0842435 -0.953372 -0.222295
+0.180021 0.0962235 -0.936792 -0.284173
+0.173343 0.107791 -0.9162 -0.344833
+0.165922 0.118898 -0.891686 -0.404017
+0.15779 0.129495 -0.863352 -0.461471
+0.148983 0.139538 -0.831322 -0.516949
+0.139538 0.148983 -0.795732 -0.570214
+0.129495 0.15779 -0.756735 -0.621036
+0.118898 0.165922 -0.714497 -0.669199
+0.107791 0.173343 -0.669199 -0.714497
+0.0962234 0.180021 -0.621036 -0.756735
+0.0842435 0.185929 -0.570214 -0.795732
+0.0719027 0.191041 -0.516949 -0.831322
+0.0592541 0.195335 -0.461471 -0.863352
+0.0463517 0.198792 -0.404017 -0.891686
+0.0332509 0.201398 -0.344833 -0.9162
+0.0200076 0.203141 -0.284173 -0.936792
+0.00667873 0.204015 -0.222295 -0.953372
+-0.00667877 0.204015 -0.159466 -0.96587
+-0.0200077 0.203141 -0.0959533 -0.974231
+-0.0332509 0.201398 -0.0320301 -0.978421
+-0.0463518 0.198792 0.0320302 -0.978421
+-0.0592541 0.195335 0.0959535 -0.974231
+-0.0719028 0.191041 0.159466 -0.96587
+-0.0842435 0.185929 0.222295 -0.953372
+-0.0962234 0.180021 0.284173 -0.936792
+-0.107791 0.173343 0.344833 -0.9162
+-0.118898 0.165922 0.404018 -0.891686
+-0.129495 0.15779 0.461471 -0.863352
+-0.139538 0.148983 0.516949 -0.831322
+-0.148983 0.139538 0.570214 -0.795732
+-0.15779 0.129495 0.621036 -0.756735
+-0.165922 0.118898 0.669199 -0.714497
+-0.173343 0.107791 0.714497 -0.669199
+-0.180021 0.0962235 0.756735 -0.621036
+-0.185929 0.0842435 0.795732 -0.570214
+-0.191041 0.0719027 0.831322 -0.516949
+-0.195335 0.0592541 0.863352 -0.461472
+-0.198792 0.0463517 0.891686 -0.404017
+-0.201398 0.0332509 0.9162 -0.344833
+-0.203141 0.0200077 0.936792 -0.284173
+-0.204015 0.00667877 0.953372 -0.222295
+0.204015 0.00667875 -0.831322 0.516949
+0.203141 0.0200077 -0.863352 0.461471
+0.201398 0.0332509 -0.891686 0.404017
+0.198792 0.0463518 -0.9162 0.344833
+0.195335 0.0592541 -0.936792 0.284173
+0.191041 0.0719027 -0.953372 0.222295
+0.185929 0.0842435 -0.96587 0.159466
+0.180021 0.0962235 -0.974231 0.0959534
+0.173343 0.107791 -0.978421 0.0320302
+0.165922 0.118898 -0.978421 -0.0320302
+0.15779 0.129495 -0.974231 -0.0959534
+0.148983 0.139538 -0.96587 -0.159466
+0.139538 0.148983 -0.953372 -0.222295
+0.129495 0.15779 -0.936792 -0.284173
+0.118898 0.165922 -0.9162 -0.344833
+0.107791 0.173343 -0.891686 -0.404018
+0.0962234 0.180021 -0.863352 -0.461472
+0.0842435 0.185929 -0.831322 -0.516949
+0.0719027 0.191041 -0.795732 -0.570214
+0.0592541 0.195335 -0.756735 -0.621036
+0.0463517 0.198792 -0.714497 -0.669199
+0.0332509 0.201398 -0.669199 -0.714497
+0.0200076 0.203141 -0.621036 -0.756735
+0.00667873 0.204015 -0.570214 -0.795732
+-0.00667877 0.204015 -0.516949 -0.831322
+-0.0200077 0.203141 -0.461471 -0.863352
+-0.0332509 0.201398 -0.404017 -0.891686
+-0.0463518 0.198792 -0.344833 -0.9162
+-0.0592541 0.195335 -0.284173 -0.936792
+-0.0719028 0.191041 -0.222295 -0.953372
+-0.0842435 0.185929 -0.159466 -0.96587
+-0.0962234 0.180021 -0.0959535 -0.974231
+-0.107791 0.173343 -0.0320302 -0.978421
+-0.118898 0.165922 0.0320303 -0.978421
+-0.129495 0.15779 0.0959534 -0.974231
+-0.139538 0.148983 0.159466 -0.96587
+-0.148983 0.139538 0.222295 -0.953372
+-0.15779 0.129495 0.284173 -0.936792
+-0.165922 0.118898 0.344833 -0.9162
+-0.173343 0.107791 0.404018 -0.891686
+-0.180021 0.0962235 0.461471 -0.863352
+-0.185929 0.0842435 0.516949 -0.831322
+-0.191041 0.0719027 0.570214 -0.795732
+-0.195335 0.0592541 0.621036 -0.756735
+-0.198792 0.0463517 0.669199 -0.714497
+-0.201398 0.0332509 0.714497 -0.669199
+-0.203141 0.0200077 0.756735 -0.621036
+-0.204015 0.00667877 0.795732 -0.570214
+0.255019 0.00834844 -0.875416 0.41054
+0.253927 0.0250096 -0.900392 0.352407
+0.251747 0.0415636 -0.921513 0.292764
+0.24849 0.0579397 -0.938687 0.231867
+0.244168 0.0740676 -0.951842 0.169977
+0.238801 0.0898784 -0.960921 0.10736
+0.232412 0.105304 -0.965886 0.0442829
+0.225027 0.120279 -0.966714 -0.0189838
+0.216678 0.134739 -0.963402 -0.0821693
+0.207402 0.148622 -0.955965 -0.145003
+0.197238 0.161869 -0.944435 -0.207216
+0.186229 0.174422 -0.92886 -0.268541
+0.174422 0.186229 -0.909308 -0.328716
+0.161869 0.197238 -0.885862 -0.387484
+0.148622 0.207402 -0.858623 -0.444593
+0.134739 0.216678 -0.827707 -0.499797
+0.120279 0.225027 -0.793246 -0.552862
+0.105304 0.232412 -0.755389 -0.603559
+0.0898784 0.238801 -0.714297 -0.651671
+0.0740676 0.244168 -0.670146 -0.696993
+0.0579397 0.24849 -0.623126 -0.739331
+0.0415636 0.251747 -0.573437 -0.778502
+0.0250096 0.253927 -0.521293 -0.81434
+0.00834842 0.255019 -0.466916 -0.846691
+-0.00834847 0.255019 -0.41054 -0.875416
+-0.0250096 0.253927 -0.352406 -0.900392
+-0.0415636 0.251747 -0.292764 -0.921513
+-0.0579397 0.24849 -0.231867 -0.938687
+-0.0740677 0.244168 -0.169977 -0.951842
+-0.0898785 0.238801 -0.10736 -0.960921
+-0.105304 0.232412 -0.0442828 -0.965886
+-0.120279 0.225027 0.0189837 -0.966714
+-0.134739 0.216678 0.0821693 -0.963402
+-0.148622 0.207402 0.145003 -0.955965
+-0.161869 0.197238 0.207216 -0.944435
+-0.174422 0.186229 0.268541 -0.92886
+-0.186229 0.174422 0.328716 -0.909308
+-0.197238 0.161869 0.387484 -0.885862
+-0.207402 0.148622 0.444593 -0.858623
+-0.216678 0.134739 0.499797 -0.827707
+-0.225027 0.120279 0.552862 -0.793246
+-0.232412 0.105304 0.603559 -0.755389
+-0.238801 0.0898784 0.651672 -0.714297
+-0.244168 0.0740676 0.696993 -0.670146
+-0.24849 0.0579397 0.739331 -0.623126
+-0.251747 0.0415636 0.778502 -0.573437
+-0.253927 0.0250096 0.81434 -0.521293
+-0.255019 0.00834847 0.846691 -0.466916
+0.153011 0.00500907 -0.286863 0.94566
+0.152356 0.0150057 -0.348098 0.924873
+0.151048 0.0249382 -0.407842 0.900126
+0.149094 0.0347638 -0.46584 0.871525
+0.146501 0.0444406 -0.521843 0.839192
+0.143281 0.0539271 -0.575611 0.803265
+0.139447 0.0631826 -0.626915 0.763898
+0.135016 0.0721676 -0.675534 0.72126
+0.130007 0.0808436 -0.72126 0.675534
+0.124441 0.0891733 -0.763898 0.626915
+0.118343 0.0971212 -0.803265 0.575611
+0.111737 0.104653 -0.839192 0.521843
+0.104653 0.111737 -0.871525 0.46584
+0.0971212 0.118343 -0.900126 0.407842
+0.0891733 0.124441 -0.924873 0.348098
+0.0808435 0.130007 -0.94566 0.286863
+0.0721676 0.135016 -0.962397 0.224399
+0.0631826 0.139447 -0.975013 0.160975
+0.053927 0.143281 -0.983453 0.0968616
+0.0444406 0.146501 -0.987683 0.0323333
+0.0347638 0.149094 -0.987683 -0.0323334
+0.0249382 0.151048 -0.983453 -0.0968618
+0.0150057 0.152356 -0.975013 -0.160975
+0.00500905 0.153011 -0.962397 -0.2244
+-0.00500908 0.153011 -0.94566 -0.286863
+-0.0150058 0.152356 -0.924873 -0.348098
+-0.0249382 0.151048 -0.900126 -0.407842
+-0.0347638 0.149094 -0.871525 -0.46584
+-0.0444406 0.146501 -0.839192 -0.521843
+-0.0539271 0.143281 -0.803265 -0.575611
+-0.0631826 0.139447 -0.763898 -0.626915
+-0.0721676 0.135016 -0.72126 -0.675534
+-0.0808435 0.130007 -0.675534 -0.72126
+-0.0891733 0.124441 -0.626915 -0.763898
+-0.0971212 0.118343 -0.575611 -0.803265
+-0.104653 0.111737 -0.521843 -0.839192
+-0.111737 0.104653 -0.46584 -0.871525
+-0.118343 0.0971212 -0.407842 -0.900126
+-0.124441 0.0891733 -0.348098 -0.924873
+-0.130007 0.0808435 -0.286863 -0.94566
+-0.135016 0.0721676 -0.2244 -0.962397
+-0.139447 0.0631826 -0.160975 -0.975013
+-0.143281 0.053927 -0.0968616 -0.983453
+-0.146501 0.0444406 -0.0323334 -0.987683
+-0.149094 0.0347638 0.0323335 -0.987683
+-0.151048 0.0249382 0.0968616 -0.983453
+-0.152356 0.0150057 0.160975 -0.975013
+-0.153011 0.00500908 0.224399 -0.962397
+0.204015 0.00667875 -0.570214 0.795732
+0.203141 0.0200077 -0.621036 0.756735
+0.201398 0.0332509 -0.669199 0.714497
+0.198792 0.0463518 -0.714497 0.669199
+0.195335 0.0592541 -0.756735 0.621036
+0.191041 0.0719027 -0.795732 0.570214
+0.185929 0.0842435 -0.831322 0.516949
+0.180021 0.0962235 -0.863352 0.461471
+0.173343 0.107791 -0.891686 0.404017
+0.165922 0.118898 -0.9162 0.344833
+0.15779 0.129495 -0.936792 0.284173
+0.148983 0.139538 -0.953372 0.222295
+0.139538 0.148983 -0.96587 0.159466
+0.129495 0.15779 -0.974231 0.0959534
+0.118898 0.165922 -0.978421 0.0320301
+0.107791 0.173343 -0.978421 -0.0320303
+0.0962234 0.180021 -0.974231 -0.0959535
+0.0842435 0.185929 -0.96587 -0.159466
+0.0719027 0.191041 -0.953372 -0.222295
+0.0592541 0.195335 -0.936792 -0.284173
+0.0463517 0.198792 -0.9162 -0.344833
+0.0332509 0.201398 -0.891686 -0.404018
+0.0200076 0.203141 -0.863352 -0.461472
+0.00667873 0.204015 -0.831322 -0.516949
+-0.00667877 0.204015 -0.795732 -0.570214
+-0.0200077 0.203141 -0.756735 -0.621036
+-0.0332509 0.201398 -0.714497 -0.669199
+-0.0463518 0.198792 -0.669199 -0.714497
+-0.0592541 0.195335 -0.621036 -0.756735
+-0.0719028 0.191041 -0.570214 -0.795732
+-0.0842435 0.185929 -0.516949 -0.831322
+-0.0962234 0.180021 -0.461472 -0.863352
+-0.107791 0.173343 -0.404017 -0.891686
+-0.118898 0.165922 -0.344833 -0.9162
+-0.129495 0.15779 -0.284173 -0.936792
+-0.139538 0.148983 -0.222295 -0.953372
+-0.148983 0.139538 -0.159466 -0.96587
+-0.15779 0.129495 -0.0959533 -0.974231
+-0.165922 0.118898 -0.0320303 -0.978421
+-0.173343 0.107791 0.0320302 -0.978421
+-0.180021 0.0962235 0.0959533 -0.974231
+-0.185929 0.0842435 0.159466 -0.96587
+-0.191041 0.0719027 0.222295 -0.953372
+-0.195335 0.0592541 0.284173 -0.936792
+-0.198792 0.0463517 0.344833 -0.9162
+-0.201398 0.0332509 0.404017 -0.891686
+-0.203141 0.0200077 0.461472 -0.863352
+-0.204015 0.00667877 0.516949 -0.831322
+0.204015 0.00667875 -0.222295 0.953372
+0.203141 0.0200077 -0.284173 0.936792
+0.201398 0.0332509 -0.344833 0.9162
+0.198792 0.0463518 -0.404017 0.891686
+0.195335 0.0592541 -0.461471 0.863352
+0.191041 0.0719027 -0.516949 0.831322
+0.185929 0.0842435 -0.570214 0.795732
+0.180021 0.0962235 -0.621036 0.756735
+0.173343 0.107791 -0.669199 0.714497
+0.165922 0.118898 -0.714497 0.669199
+0.15779 0.129495 -0.756735 0.621036
+0.148983 0.139538 -0.795732 0.570214
+0.139538 0.148983 -0.831322 0.516949
+0.129495 0.15779 -0.863352 0.461471
+0.118898 0.165922 -0.891686 0.404017
+0.107791 0.173343 -0.9162 0.344833
+0.0962234 0.180021 -0.936792 0.284173
+0.0842435 0.185929 -0.953372 0.222295
+0.0719027 0.191041 -0.96587 0.159466
+0.0592541 0.195335 -0.974231 0.0959533
+0.0463517 0.198792 -0.978421 0.0320302
+0.0332509 0.201398 -0.978421 -0.0320303
+0.0200076 0.203141 -0.974231 -0.0959535
+0.00667873 0.204015 -0.96587 -0.159466
+-0.00667877 0.204015 -0.953372 -0.222295
+-0.0200077 0.203141 -0.936792 -0.284173
+-0.0332509 0.201398 -0.9162 -0.344833
+-0.0463518 0.198792 -0.891686 -0.404018
+-0.0592541 0.195335 -0.863352 -0.461472
+-0.0719028 0.191041 -0.831322 -0.51695
+-0.0842435 0.185929 -0.795732 -0.570214
+-0.0962234 0.180021 -0.756735 -0.621036
+-0.107791 0.173343 -0.714497 -0.669199
+-0.118898 0.165922 -0.669199 -0.714497
+-0.129495 0.15779 -0.621036 -0.756735
+-0.139538 0.148983 -0.570214 -0.795732
+-0.148983 0.139538 -0.516949 -0.831322
+-0.15779 0.129495 -0.461471 -0.863352
+-0.165922 0.118898 -0.404018 -0.891686
+-0.173343 0.107791 -0.344833 -0.9162
+-0.180021 0.0962235 -0.284173 -0.936792
+-0.185929 0.0842435 -0.222295 -0.953372
+-0.191041 0.0719027 -0.159466 -0.96587
+-0.195335 0.0592541 -0.0959534 -0.974231
+-0.198792 0.0463517 -0.0320301 -0.978421
+-0.201398 0.0332509 0.0320301 -0.978421
+-0.203141 0.0200077 0.0959534 -0.974231
+-0.204015 0.00667877 0.159466 -0.96587
+0.255019 0.00834844 -0.466916 0.846691
+0.253927 0.0250096 -0.521293 0.81434
+0.251747 0.0415636 -0.573437 0.778502
+0.24849 0.0579397 -0.623126 0.739331
+0.244168 0.0740676 -0.670146 0.696993
+0.238801 0.0898784 -0.714297 0.651671
+0.232412 0.105304 -0.755389 0.603559
+0.225027 0.120279 -0.793246 0.552862
+0.216678 0.134739 -0.827707 0.499797
+0.207402 0.148622 -0.858623 0.444593
+0.197238 0.161869 -0.885862 0.387484
+0.186229 0.174422 -0.909308 0.328716
+0.174422 0.186229 -0.92886 0.268541
+0.161869 0.197238 -0.944435 0.207216
+0.148622 0.207402 -0.955965 0.145003
+0.134739 0.216678 -0.963402 0.0821692
+0.120279 0.225027 -0.966714 0.0189837
+0.105304 0.232412 -0.965886 -0.044283
+0.0898784 0.238801 -0.960921 -0.10736
+0.0740676 0.244168 -0.951842 -0.169977
+0.0579397 0.24849 -0.938687 -0.231867
+0.0415636 0.251747 -0.921512 -0.292764
+0.0250096 0.253927 -0.900392 -0.352407
+0.00834842 0.255019 -0.875415 -0.410541
+-0.00834847 0.255019 -0.846691 -0.466916
+-0.0250096 0.253927 -0.81434 -0.521293
+-0.0415636 0.251747 -0.778502 -0.573437
+-0.0579397 0.24849 -0.739331 -0.623126
+-0.0740677 0.244168 -0.696993 -0.670146
+-0.0898785 0.238801 -0.651671 -0.714297
+-0.105304 0.232412 -0.603559 -0.755389
+-0.120279 0.225027 -0.552862 -0.793246
+-0.134739 0.216678 -0.499797 -0.827707
+-0.148622 0.207402 -0.444593 -0.858623
+-0.161869 0.197238 -0.387484 -0.885862
+-0.174422 0.186229 -0.328716 -0.909308
+-0.186229 0.174422 -0.268541 -0.92886
+-0.197238 0.161869 -0.207216 -0.944435
+-0.207402 0.148622 -0.145003 -0.955965
+-0.216678 0.134739 -0.0821693 -0.963402
+-0.225027 0.120279 -0.0189839 -0.966714
+-0.232412 0.105304 0.0442829 -0.965886
+-0.238801 0.0898784 0.10736 -0.960921
+-0.244168 0.0740676 0.169977 -0.951842
+-0.24849 0.0579397 0.231867 -0.938687
+-0.251747 0.0415636 0.292764 -0.921513
+-0.253927 0.0250096 0.352407 -0.900392
+-0.255019 0.00834847 0.41054 -0.875416
+0.255019 0.00834844 -0.705706 0.660965
+0.253927 0.0250096 -0.747424 0.613395
+0.251747 0.0415636 -0.785942 0.563198
+0.24849 0.0579397 -0.821094 0.510589
+0.244168 0.0740676 -0.85273 0.455794
+0.238801 0.0898784 -0.880714 0.399046
+0.232412 0.105304 -0.904928 0.340591
+0.225027 0.120279 -0.925266 0.280676
+0.216678 0.134739 -0.941642 0.21956
+0.207402 0.148622 -0.953986 0.157504
+0.197238 0.161869 -0.962244 0.0947728
+0.186229 0.174422 -0.966382 0.0316361
+0.174422 0.186229 -0.966382 -0.0316361
+0.161869 0.197238 -0.962244 -0.0947728
+0.148622 0.207402 -0.953986 -0.157504
+0.134739 0.216678 -0.941642 -0.21956
+0.120279 0.225027 -0.925266 -0.280676
+0.105304 0.232412 -0.904928 -0.340591
+0.0898784 0.238801 -0.880714 -0.399047
+0.0740676 0.244168 -0.85273 -0.455794
+0.0579397 0.24849 -0.821094 -0.510589
+0.0415636 0.251747 -0.785941 -0.563198
+0.0250096 0.253927 -0.747424 -0.613395
+0.00834842 0.255019 -0.705706 -0.660966
+-0.00834847 0.255019 -0.660965 -0.705706
+-0.0250096 0.253927 -0.613395 -0.747424
+-0.0415636 0.251747 -0.563198 -0.785942
+-0.0579397 0.24849 -0.510589 -0.821094
+-0.0740677 0.244168 -0.455793 -0.85273
+-0.0898785 0.238801 -0.399046 -0.880714
+-0.105304 0.232412 -0.34059 -0.904928
+-0.120279 0.225027 -0.280676 -0.925266
+-0.134739 0.216678 -0.21956 -0.941642
+-0.148622 0.207402 -0.157504 -0.953986
+-0.161869 0.197238 -0.0947728 -0.962244
+-0.174422 0.186229 -0.031636 -0.966382
+-0.186229 0.174422 0.031636 -0.966382
+-0.197238 0.161869 0.0947728 -0.962244
+-0.207402 0.148622 0.157504 -0.953986
+-0.216678 0.134739 0.21956 -0.941642
+-0.225027 0.120279 0.280676 -0.925266
+-0.232412 0.105304 0.340591 -0.904928
+-0.238801 0.0898784 0.399047 -0.880714
+-0.244168 0.0740676 0.455794 -0.85273
+-0.24849 0.0579397 0.510589 -0.821094
+-0.251747 0.0415636 0.563198 -0.785942
+-0.253927 0.0250096 0.613395 -0.747424
+-0.255019 0.00834847 0.660965 -0.705706
+0.306022 0.0100181 -0.773807 0.554502
+0.304712 0.0300115 -0.808416 0.502706
+0.302097 0.0498763 -0.839564 0.448756
+0.298188 0.0695276 -0.867117 0.392885
+0.293002 0.0888812 -0.890956 0.335332
+0.286561 0.107854 -0.91098 0.276343
+0.278894 0.126365 -0.927103 0.21617
+0.270032 0.144335 -0.939256 0.155072
+0.260014 0.161687 -0.947388 0.0933095
+0.248882 0.178347 -0.951462 0.0311476
+0.236685 0.194242 -0.951462 -0.0311476
+0.223474 0.209307 -0.947388 -0.0933096
+0.209307 0.223474 -0.939256 -0.155072
+0.194242 0.236685 -0.927103 -0.21617
+0.178347 0.248882 -0.91098 -0.276343
+0.161687 0.260014 -0.890956 -0.335332
+0.144335 0.270032 -0.867116 -0.392885
+0.126365 0.278894 -0.839564 -0.448756
+0.107854 0.286562 -0.808416 -0.502706
+0.0888812 0.293002 -0.773807 -0.554502
+0.0695276 0.298188 -0.735884 -0.603924
+0.0498763 0.302097 -0.69481 -0.650761
+0.0300115 0.304712 -0.65076 -0.69481
+0.0100181 0.306022 -0.603924 -0.735884
+-0.0100182 0.306022 -0.554502 -0.773807
+-0.0300115 0.304712 -0.502706 -0.808416
+-0.0498764 0.302097 -0.448756 -0.839564
+-0.0695276 0.298188 -0.392885 -0.867117
+-0.0888812 0.293002 -0.335332 -0.890956
+-0.107854 0.286561 -0.276343 -0.91098
+-0.126365 0.278894 -0.21617 -0.927103
+-0.144335 0.270032 -0.155072 -0.939256
+-0.161687 0.260014 -0.0933095 -0.947388
+-0.178347 0.248882 -0.0311475 -0.951462
+-0.194242 0.236685 0.0311476 -0.951462
+-0.209307 0.223474 0.0933096 -0.947388
+-0.223474 0.209307 0.155072 -0.939256
+-0.236685 0.194242 0.21617 -0.927103
+-0.248882 0.178347 0.276343 -0.91098
+-0.260014 0.161687 0.335332 -0.890956
+-0.270032 0.144335 0.392885 -0.867117
+-0.278894 0.126365 0.448756 -0.839564
+-0.286562 0.107854 0.502706 -0.808416
+-0.293002 0.0888812 0.554502 -0.773807
+-0.298188 0.0695276 0.603924 -0.735884
+-0.302097 0.0498764 0.65076 -0.69481
+-0.304712 0.0300115 0.69481 -0.65076
+-0.306022 0.0100182 0.735884 -0.603924
+0.306022 0.0100181 -0.603924 0.735884
+0.304712 0.0300115 -0.65076 0.69481
+0.302097 0.0498763 -0.69481 0.65076
+0.298188 0.0695276 -0.735884 0.603924
+0.293002 0.0888812 -0.773807 0.554502
+0.286561 0.107854 -0.808416 0.502706
+0.278894 0.126365 -0.839564 0.448756
+0.270032 0.144335 -0.867117 0.392885
+0.260014 0.161687 -0.890956 0.335332
+0.248882 0.178347 -0.91098 0.276343
+0.236685 0.194242 -0.927103 0.21617
+0.223474 0.209307 -0.939256 0.155072
+0.209307 0.223474 -0.947388 0.0933095
+0.194242 0.236685 -0.951462 0.0311476
+0.178347 0.248882 -0.951462 -0.0311477
+0.161687 0.260014 -0.947388 -0.0933096
+0.144335 0.270032 -0.939256 -0.155072
+0.126365 0.278894 -0.927103 -0.21617
+0.107854 0.286562 -0.91098 -0.276343
+0.0888812 0.293002 -0.890956 -0.335332
+0.0695276 0.298188 -0.867117 -0.392885
+0.0498763 0.302097 -0.839564 -0.448756
+0.0300115 0.304712 -0.808416 -0.502706
+0.0100181 0.306022 -0.773807 -0.554502
+-0.0100182 0.306022 -0.735884 -0.603924
+-0.0300115 0.304712 -0.69481 -0.650761
+-0.0498764 0.302097 -0.65076 -0.69481
+-0.0695276 0.298188 -0.603924 -0.735884
+-0.0888812 0.293002 -0.554502 -0.773807
+-0.107854 0.286561 -0.502705 -0.808416
+-0.126365 0.278894 -0.448756 -0.839564
+-0.144335 0.270032 -0.392885 -0.867116
+-0.161687 0.260014 -0.335332 -0.890956
+-0.178347 0.248882 -0.276343 -0.91098
+-0.194242 0.236685 -0.21617 -0.927103
+-0.209307 0.223474 -0.155072 -0.939256
+-0.223474 0.209307 -0.0933096 -0.947388
+-0.236685 0.194242 -0.0311476 -0.951462
+-0.248882 0.178347 0.0311476 -0.951462
+-0.260014 0.161687 0.0933096 -0.947388
+-0.270032 0.144335 0.155072 -0.939256
+-0.278894 0.126365 0.21617 -0.927103
+-0.286562 0.107854 0.276343 -0.91098
+-0.293002 0.0888812 0.335332 -0.890956
+-0.298188 0.0695276 0.392885 -0.867116
+-0.302097 0.0498764 0.448756 -0.839564
+-0.304712 0.0300115 0.502706 -0.808416
+-0.306022 0.0100182 0.554502 -0.773807
+0.357026 0.0116878 -0.681709 0.63849
+0.355497 0.0350134 -0.722008 0.592537
+0.352446 0.0581891 -0.759216 0.544047
+0.347886 0.0811156 -0.793173 0.493227
+0.341836 0.103695 -0.823733 0.440295
+0.334322 0.12583 -0.850766 0.385477
+0.325376 0.147426 -0.874156 0.329009
+0.315037 0.168391 -0.893803 0.271132
+0.30335 0.188635 -0.909622 0.212094
+0.290363 0.208071 -0.921546 0.152148
+0.276133 0.226616 -0.929524 0.0915501
+0.26072 0.244191 -0.933521 0.0305603
+0.244191 0.26072 -0.933521 -0.0305603
+0.226616 0.276133 -0.929524 -0.0915501
+0.208071 0.290363 -0.921546 -0.152148
+0.188635 0.30335 -0.909622 -0.212094
+0.168391 0.315038 -0.893803 -0.271132
+0.147426 0.325376 -0.874156 -0.329009
+0.12583 0.334322 -0.850766 -0.385477
+0.103695 0.341836 -0.823733 -0.440295
+0.0811155 0.347886 -0.793173 -0.493227
+0.058189 0.352446 -0.759216 -0.544047
+0.0350134 0.355497 -0.722008 -0.592537
+0.0116878 0.357026 -0.681709 -0.63849
+-0.0116879 0.357026 -0.63849 -0.681709
+-0.0350134 0.355497 -0.592537 -0.722008
+-0.0581891 0.352446 -0.544047 -0.759216
+-0.0811156 0.347886 -0.493227 -0.793173
+-0.103695 0.341836 -0.440294 -0.823733
+-0.12583 0.334322 -0.385477 -0.850766
+-0.147426 0.325376 -0.329009 -0.874156
+-0.168391 0.315038 -0.271132 -0.893803
+-0.188635 0.30335 -0.212094 -0.909622
+-0.208071 0.290363 -0.152148 -0.921546
+-0.226616 0.276133 -0.0915501 -0.929524
+-0.244191 0.26072 -0.0305603 -0.933521
+-0.26072 0.244191 0.0305603 -0.933521
+-0.276133 0.226616 0.0915501 -0.929524
+-0.290363 0.208071 0.152148 -0.921546
+-0.30335 0.188635 0.212094 -0.909622
+-0.315037 0.168391 0.271132 -0.893803
+-0.325376 0.147426 0.329009 -0.874156
+-0.334322 0.12583 0.385477 -0.850766
+-0.341836 0.103695 0.440295 -0.823733
+-0.347886 0.0811155 0.493227 -0.793173
+-0.352446 0.0581891 0.544047 -0.759216
+-0.355497 0.0350134 0.592537 -0.722008
+-0.357026 0.0116879 0.63849 -0.681709
+0.255019 0.00834844 -0.959434 0.119929
+0.253927 0.0250096 -0.965223 0.0569222
+0.251747 0.0415636 -0.966879 -0.0063283
+0.24849 0.0579397 -0.964395 -0.0695517
+0.244168 0.0740676 -0.957782 -0.132477
+0.238801 0.0898784 -0.947067 -0.194836
+0.232412 0.105304 -0.932296 -0.256359
+0.225027 0.120279 -0.913533 -0.316786
+0.216678 0.134739 -0.890858 -0.375855
+0.207402 0.148622 -0.864369 -0.433316
+0.197238 0.161869 -0.834178 -0.48892
+0.186229 0.174422 -0.800415 -0.542431
+0.174422 0.186229 -0.763225 -0.593619
+0.161869 0.197238 -0.722766 -0.642266
+0.148622 0.207402 -0.679212 -0.688162
+0.134739 0.216678 -0.63275 -0.731111
+0.120279 0.225027 -0.583578 -0.770929
+0.105304 0.232412 -0.531908 -0.807447
+0.0898784 0.238801 -0.477959 -0.840506
+0.0740676 0.244168 -0.421964 -0.869967
+0.0579397 0.24849 -0.364162 -0.895702
+0.0415636 0.251747 -0.304801 -0.917601
+0.0250096 0.253927 -0.244134 -0.935572
+0.00834842 0.255019 -0.182422 -0.949536
+-0.00834847 0.255019 -0.119929 -0.959434
+-0.0250096 0.253927 -0.0569221 -0.965223
+-0.0415636 0.251747 0.00632837 -0.966879
+-0.0579397 0.24849 0.0695517 -0.964395
+-0.0740677 0.244168 0.132477 -0.957782
+-0.0898785 0.238801 0.194836 -0.947066
+-0.105304 0.232412 0.25636 -0.932296
+-0.120279 0.225027 0.316786 -0.913533
+-0.134739 0.216678 0.375855 -0.890858
+-0.148622 0.207402 0.433316 -0.864369
+-0.161869 0.197238 0.48892 -0.834178
+-0.174422 0.186229 0.542431 -0.800415
+-0.186229 0.174422 0.593619 -0.763225
+-0.197238 0.161869 0.642266 -0.722766
+-0.207402 0.148622 0.688162 -0.679212
+-0.216678 0.134739 0.731111 -0.63275
+-0.225027 0.120279 0.770929 -0.583578
+-0.232412 0.105304 0.807447 -0.531908
+-0.238801 0.0898784 0.840506 -0.477959
+-0.244168 0.0740676 0.869967 -0.421964
+-0.24849 0.0579397 0.895702 -0.364162
+-0.251747 0.0415636 0.917601 -0.304801
+-0.253927 0.0250096 0.935572 -0.244134
+-0.255019 0.00834847 0.949536 -0.182422
+0.306022 0.0100181 -0.947388 0.0933095
+0.304712 0.0300115 -0.951462 0.0311476
+0.302097 0.0498763 -0.951462 -0.0311476
+0.298188 0.0695276 -0.947388 -0.0933096
+0.293002 0.0888812 -0.939256 -0.155072
+0.286561 0.107854 -0.927103 -0.21617
+0.278894 0.126365 -0.91098 -0.276343
+0.270032 0.144335 -0.890956 -0.335332
+0.260014 0.161687 -0.867117 -0.392885
+0.248882 0.178347 -0.839564 -0.448756
+0.236685 0.194242 -0.808416 -0.502706
+0.223474 0.209307 -0.773807 -0.554502
+0.209307 0.223474 -0.735884 -0.603924
+0.194242 0.236685 -0.69481 -0.65076
+0.178347 0.248882 -0.65076 -0.69481
+0.161687 0.260014 -0.603924 -0.735884
+0.144335 0.270032 -0.554502 -0.773807
+0.126365 0.278894 -0.502706 -0.808416
+0.107854 0.286562 -0.448756 -0.839564
+0.0888812 0.293002 -0.392885 -0.867117
+0.0695276 0.298188 -0.335332 -0.890956
+0.0498763 0.302097 -0.276343 -0.91098
+0.0300115 0.304712 -0.21617 -0.927103
+0.0100181 0.306022 -0.155072 -0.939256
+-0.0100182 0.306022 -0.0933094 -0.947388
+-0.0300115 0.304712 -0.0311476 -0.951462
+-0.0498764 0.302097 0.0311477 -0.951462
+-0.0695276 0.298188 0.0933096 -0.947388
+-0.0888812 0.293002 0.155072 -0.939256
+-0.107854 0.286561 0.21617 -0.927103
+-0.126365 0.278894 0.276343 -0.91098
+-0.144335 0.270032 0.335332 -0.890956
+-0.161687 0.260014 0.392885 -0.867117
+-0.178347 0.248882 0.448756 -0.839564
+-0.194242 0.236685 0.502706 -0.808416
+-0.209307 0.223474 0.554502 -0.773807
+-0.223474 0.209307 0.603924 -0.735884
+-0.236685 0.194242 0.650761 -0.69481
+-0.248882 0.178347 0.69481 -0.650761
+-0.260014 0.161687 0.735884 -0.603924
+-0.270032 0.144335 0.773807 -0.554502
+-0.278894 0.126365 0.808416 -0.502706
+-0.286562 0.107854 0.839564 -0.448756
+-0.293002 0.0888812 0.867117 -0.392885
+-0.298188 0.0695276 0.890956 -0.335332
+-0.302097 0.0498764 0.91098 -0.276343
+-0.304712 0.0300115 0.927103 -0.21617
+-0.306022 0.0100182 0.939256 -0.155072
+0.306022 0.0100181 -0.890956 0.335332
+0.304712 0.0300115 -0.91098 0.276343
+0.302097 0.0498763 -0.927103 0.21617
+0.298188 0.0695276 -0.939256 0.155072
+0.293002 0.0888812 -0.947388 0.0933095
+0.286561 0.107854 -0.951462 0.0311477
+0.278894 0.126365 -0.951462 -0.0311476
+0.270032 0.144335 -0.947388 -0.0933096
+0.260014 0.161687 -0.939256 -0.155072
+0.248882 0.178347 -0.927103 -0.21617
+0.236685 0.194242 -0.91098 -0.276343
+0.223474 0.209307 -0.890956 -0.335332
+0.209307 0.223474 -0.867117 -0.392885
+0.194242 0.236685 -0.839564 -0.448756
+0.178347 0.248882 -0.808416 -0.502706
+0.161687 0.260014 -0.773807 -0.554502
+0.144335 0.270032 -0.735884 -0.603924
+0.126365 0.278894 -0.69481 -0.650761
+0.107854 0.286562 -0.65076 -0.69481
+0.0888812 0.293002 -0.603924 -0.735884
+0.0695276 0.298188 -0.554502 -0.773807
+0.0498763 0.302097 -0.502706 -0.808416
+0.0300115 0.304712 -0.448756 -0.839564
+0.0100181 0.306022 -0.392885 -0.867117
+-0.0100182 0.306022 -0.335332 -0.890956
+-0.0300115 0.304712 -0.276343 -0.91098
+-0.0498764 0.302097 -0.21617 -0.927103
+-0.0695276 0.298188 -0.155072 -0.939256
+-0.0888812 0.293002 -0.0933094 -0.947388
+-0.107854 0.286561 -0.0311475 -0.951462
+-0.126365 0.278894 0.0311478 -0.951462
+-0.144335 0.270032 0.0933094 -0.947388
+-0.161687 0.260014 0.155072 -0.939256
+-0.178347 0.248882 0.21617 -0.927103
+-0.194242 0.236685 0.276343 -0.91098
+-0.209307 0.223474 0.335332 -0.890956
+-0.223474 0.209307 0.392885 -0.867117
+-0.236685 0.194242 0.448756 -0.839564
+-0.248882 0.178347 0.502706 -0.808416
+-0.260014 0.161687 0.554502 -0.773807
+-0.270032 0.144335 0.603924 -0.735884
+-0.278894 0.126365 0.65076 -0.69481
+-0.286562 0.107854 0.69481 -0.65076
+-0.293002 0.0888812 0.735884 -0.603924
+-0.298188 0.0695276 0.773807 -0.554502
+-0.302097 0.0498764 0.808416 -0.502706
+-0.304712 0.0300115 0.839564 -0.448756
+-0.306022 0.0100182 0.867116 -0.392885
+0.357026 0.0116878 -0.891229 0.279477
+0.355497 0.0350134 -0.907599 0.22059
+0.352446 0.0581891 -0.920083 0.160758
+0.347886 0.0811156 -0.928627 0.100237
+0.341836 0.103695 -0.933195 0.0392873
+0.334322 0.12583 -0.933766 -0.0218307
+0.325376 0.147426 -0.930339 -0.0828552
+0.315037 0.168391 -0.922928 -0.143525
+0.30335 0.188635 -0.911565 -0.20358
+0.290363 0.208071 -0.896299 -0.262763
+0.276133 0.226616 -0.877194 -0.320821
+0.26072 0.244191 -0.854333 -0.377506
+0.244191 0.26072 -0.827814 -0.432574
+0.226616 0.276133 -0.79775 -0.485789
+0.208071 0.290363 -0.76427 -0.536924
+0.188635 0.30335 -0.727517 -0.58576
+0.168391 0.315038 -0.687649 -0.632088
+0.147426 0.325376 -0.644836 -0.675709
+0.12583 0.334322 -0.599262 -0.716437
+0.103695 0.341836 -0.551121 -0.754096
+0.0811155 0.347886 -0.500621 -0.788527
+0.058189 0.352446 -0.447977 -0.819581
+0.0350134 0.355497 -0.393415 -0.847125
+0.0116878 0.357026 -0.337168 -0.871042
+-0.0116879 0.357026 -0.279477 -0.891229
+-0.0350134 0.355497 -0.22059 -0.907599
+-0.0581891 0.352446 -0.160757 -0.920083
+-0.0811156 0.347886 -0.100237 -0.928627
+-0.103695 0.341836 -0.0392871 -0.933195
+-0.12583 0.334322 0.0218308 -0.933766
+-0.147426 0.325376 0.0828553 -0.930339
+-0.168391 0.315038 0.143525 -0.922928
+-0.188635 0.30335 0.20358 -0.911565
+-0.208071 0.290363 0.262763 -0.896299
+-0.226616 0.276133 0.320821 -0.877194
+-0.244191 0.26072 0.377506 -0.854333
+-0.26072 0.244191 0.432574 -0.827814
+-0.276133 0.226616 0.485789 -0.79775
+-0.290363 0.208071 0.536924 -0.76427
+-0.30335 0.188635 0.58576 -0.727517
+-0.315037 0.168391 0.632088 -0.687649
+-0.325376 0.147426 0.675709 -0.644836
+-0.334322 0.12583 0.716437 -0.599262
+-0.341836 0.103695 0.754096 -0.551121
+-0.347886 0.0811155 0.788527 -0.500621
+-0.352446 0.0581891 0.819581 -0.447977
+-0.355497 0.0350134 0.847125 -0.393415
+-0.357026 0.0116879 0.871042 -0.337168
+0.357026 0.0116878 -0.931073 0.0741531
+0.355497 0.0350134 -0.933929 0.0130992
+0.352446 0.0581891 -0.932787 -0.0480108
+0.347886 0.0811156 -0.927649 -0.108915
+0.341836 0.103695 -0.91854 -0.169353
+0.334322 0.12583 -0.905497 -0.229066
+0.325376 0.147426 -0.888577 -0.287798
+0.315037 0.168391 -0.867851 -0.345297
+0.30335 0.188635 -0.843409 -0.401318
+0.290363 0.208071 -0.815356 -0.45562
+0.276133 0.226616 -0.783811 -0.507972
+0.26072 0.244191 -0.74891 -0.558148
+0.244191 0.26072 -0.710802 -0.605934
+0.226616 0.276133 -0.66965 -0.651125
+0.208071 0.290363 -0.625631 -0.693528
+0.188635 0.30335 -0.578932 -0.732962
+0.168391 0.315038 -0.529755 -0.769256
+0.147426 0.325376 -0.478309 -0.802257
+0.12583 0.334322 -0.424815 -0.831822
+0.103695 0.341836 -0.369501 -0.857825
+0.0811155 0.347886 -0.312606 -0.880155
+0.058189 0.352446 -0.254371 -0.898716
+0.0350134 0.355497 -0.195048 -0.913429
+0.0116878 0.357026 -0.134889 -0.92423
+-0.0116879 0.357026 -0.074153 -0.931073
+-0.0350134 0.355497 -0.0130991 -0.933929
+-0.0581891 0.352446 0.0480108 -0.932787
+-0.0811156 0.347886 0.108915 -0.927649
+-0.103695 0.341836 0.169353 -0.91854
+-0.12583 0.334322 0.229066 -0.905497
+-0.147426 0.325376 0.287798 -0.888577
+-0.168391 0.315038 0.345297 -0.867851
+-0.188635 0.30335 0.401318 -0.84341
+-0.208071 0.290363 0.455621 -0.815356
+-0.226616 0.276133 0.507972 -0.783812
+-0.244191 0.26072 0.558148 -0.74891
+-0.26072 0.244191 0.605934 -0.710802
+-0.276133 0.226616 0.651125 -0.66965
+-0.290363 0.208071 0.693528 -0.625631
+-0.30335 0.188635 0.732962 -0.578933
+-0.315037 0.168391 0.769256 -0.529755
+-0.325376 0.147426 0.802257 -0.478309
+-0.334322 0.12583 0.831822 -0.424815
+-0.341836 0.103695 0.857825 -0.369501
+-0.347886 0.0811155 0.880155 -0.312606
+-0.352446 0.0581891 0.898716 -0.254372
+-0.355497 0.0350134 0.913429 -0.195048
+-0.357026 0.0116879 0.92423 -0.134889
+0.40803 0.0133575 -0.910916 0.0597046
+0.406282 0.0400153 -0.912871 -2.49393e-09
+0.402795 0.0665018 -0.910916 -0.0597046
+0.397584 0.0927035 -0.905061 -0.119154
+0.390669 0.118508 -0.89533 -0.178092
+0.382082 0.143805 -0.881766 -0.236268
+0.371859 0.168487 -0.864425 -0.293433
+0.360043 0.192447 -0.843383 -0.349341
+0.346685 0.215583 -0.818729 -0.403752
+0.331843 0.237796 -0.790569 -0.456435
+0.31558 0.25899 -0.759024 -0.507164
+0.297966 0.279075 -0.724229 -0.555721
+0.279075 0.297966 -0.686333 -0.601898
+0.25899 0.31558 -0.645497 -0.645497
+0.237796 0.331843 -0.601898 -0.686333
+0.215583 0.346685 -0.555721 -0.724229
+0.192447 0.360043 -0.507164 -0.759024
+0.168487 0.371859 -0.456435 -0.790569
+0.143805 0.382082 -0.403752 -0.818729
+0.118508 0.390669 -0.349341 -0.843383
+0.0927035 0.397584 -0.293433 -0.864425
+0.0665017 0.402795 -0.236268 -0.881766
+0.0400153 0.406282 -0.178092 -0.89533
+0.0133575 0.40803 -0.119153 -0.905061
+-0.0133575 0.40803 -0.0597045 -0.910916
+-0.0400154 0.406282 7.64039e-08 -0.912871
+-0.0665018 0.402795 0.0597047 -0.910916
+-0.0927035 0.397584 0.119154 -0.905061
+-0.118508 0.390669 0.178092 -0.89533
+-0.143806 0.382082 0.236269 -0.881766
+-0.168487 0.371859 0.293433 -0.864425
+-0.192447 0.360043 0.34934 -0.843383
+-0.215583 0.346685 0.403752 -0.818729
+-0.237796 0.331843 0.456436 -0.790569
+-0.25899 0.31558 0.507164 -0.759024
+-0.279075 0.297966 0.555721 -0.724229
+-0.297966 0.279075 0.601898 -0.686333
+-0.31558 0.25899 0.645497 -0.645497
+-0.331843 0.237796 0.686333 -0.601898
+-0.346685 0.215583 0.724229 -0.555721
+-0.360043 0.192447 0.759024 -0.507164
+-0.371859 0.168487 0.790569 -0.456435
+-0.382082 0.143805 0.818729 -0.403752
+-0.390669 0.118508 0.843383 -0.349341
+-0.397584 0.0927035 0.864425 -0.293433
+-0.402795 0.0665018 0.881766 -0.236268
+-0.406282 0.0400153 0.89533 -0.178092
+-0.40803 0.0133575 0.905061 -0.119154
+0.40803 0.0133575 -0.881766 0.236268
+0.406282 0.0400153 -0.89533 0.178092
+0.402795 0.0665018 -0.905061 0.119154
+0.397584 0.0927035 -0.910916 0.0597046
+0.390669 0.118508 -0.912871 -6.80367e-10
+0.382082 0.143805 -0.910916 -0.0597046
+0.371859 0.168487 -0.905061 -0.119154
+0.360043 0.192447 -0.89533 -0.178092
+0.346685 0.215583 -0.881766 -0.236268
+0.331843 0.237796 -0.864425 -0.293433
+0.31558 0.25899 -0.843383 -0.349341
+0.297966 0.279075 -0.818729 -0.403753
+0.279075 0.297966 -0.790569 -0.456435
+0.25899 0.31558 -0.759024 -0.507164
+0.237796 0.331843 -0.724229 -0.555721
+0.215583 0.346685 -0.686333 -0.601898
+0.192447 0.360043 -0.645497 -0.645497
+0.168487 0.371859 -0.601898 -0.686333
+0.143805 0.382082 -0.555721 -0.724229
+0.118508 0.390669 -0.507164 -0.759024
+0.0927035 0.397584 -0.456435 -0.790569
+0.0665017 0.402795 -0.403752 -0.818729
+0.0400153 0.406282 -0.34934 -0.843383
+0.0133575 0.40803 -0.293433 -0.864425
+-0.0133575 0.40803 -0.236268 -0.881766
+-0.0400154 0.406282 -0.178092 -0.89533
+-0.0665018 0.402795 -0.119154 -0.905061
+-0.0927035 0.397584 -0.0597046 -0.910916
+-0.118508 0.390669 1.49406e-07 -0.912871
+-0.143806 0.382082 0.0597048 -0.910916
+-0.168487 0.371859 0.119154 -0.905061
+-0.192447 0.360043 0.178092 -0.89533
+-0.215583 0.346685 0.236268 -0.881766
+-0.237796 0.331843 0.293433 -0.864425
+-0.25899 0.31558 0.349341 -0.843383
+-0.279075 0.297966 0.403753 -0.818729
+-0.297966 0.279075 0.456435 -0.790569
+-0.31558 0.25899 0.507164 -0.759024
+-0.331843 0.237796 0.555721 -0.724229
+-0.346685 0.215583 0.601898 -0.686333
+-0.360043 0.192447 0.645497 -0.645497
+-0.371859 0.168487 0.686333 -0.601898
+-0.382082 0.143805 0.724229 -0.555721
+-0.390669 0.118508 0.759024 -0.507164
+-0.397584 0.0927035 0.790569 -0.456435
+-0.402795 0.0665018 0.818729 -0.403753
+-0.406282 0.0400153 0.843383 -0.349341
+-0.40803 0.0133575 0.864425 -0.293433
+0.456191 0.0149342 -0.877872 0.144937
+0.454238 0.0447385 -0.885472 0.0872114
+0.450339 0.0743513 -0.88928 0.029112
+0.444512 0.103646 -0.88928 -0.029112
+0.436782 0.132496 -0.885472 -0.0872114
+0.427181 0.160779 -0.877872 -0.144937
+0.415751 0.188374 -0.866513 -0.202043
+0.40254 0.215162 -0.851444 -0.258283
+0.387606 0.241029 -0.832728 -0.313417
+0.371012 0.265863 -0.810447 -0.367209
+0.352829 0.28956 -0.784695 -0.419428
+0.333136 0.312016 -0.755583 -0.469852
+0.312016 0.333136 -0.723236 -0.518263
+0.28956 0.352829 -0.687791 -0.564456
+0.265863 0.371012 -0.649401 -0.608231
+0.241029 0.387606 -0.608231 -0.649401
+0.215162 0.40254 -0.564456 -0.687791
+0.188374 0.415751 -0.518263 -0.723236
+0.160779 0.427181 -0.469852 -0.755583
+0.132496 0.436782 -0.419428 -0.784695
+0.103646 0.444512 -0.367209 -0.810447
+0.0743512 0.450339 -0.313417 -0.832728
+0.0447384 0.454238 -0.258283 -0.851444
+0.0149341 0.456191 -0.202042 -0.866513
+-0.0149342 0.456191 -0.144937 -0.877872
+-0.0447385 0.454238 -0.0872113 -0.885472
+-0.0743513 0.450339 -0.029112 -0.88928
+-0.103646 0.444512 0.0291121 -0.88928
+-0.132496 0.436781 0.0872115 -0.885472
+-0.160779 0.427181 0.144937 -0.877872
+-0.188374 0.415751 0.202043 -0.866513
+-0.215162 0.40254 0.258283 -0.851444
+-0.241029 0.387606 0.313417 -0.832728
+-0.265864 0.371012 0.367209 -0.810447
+-0.28956 0.352829 0.419428 -0.784695
+-0.312016 0.333136 0.469852 -0.755583
+-0.333136 0.312016 0.518263 -0.723236
+-0.352829 0.28956 0.564456 -0.687791
+-0.371012 0.265864 0.608231 -0.649401
+-0.387606 0.241029 0.649401 -0.608231
+-0.40254 0.215162 0.687791 -0.564456
+-0.415751 0.188374 0.723236 -0.518263
+-0.427181 0.160779 0.755583 -0.469852
+-0.436782 0.132496 0.784695 -0.419428
+-0.444512 0.103646 0.810447 -0.367209
+-0.450339 0.0743513 0.832728 -0.313417
+-0.454238 0.0447385 0.851444 -0.258283
+-0.456191 0.0149342 0.866513 -0.202043
+0.357026 0.0116878 -0.806694 0.470787
+0.355497 0.0350134 -0.835758 0.417019
+0.352446 0.0581891 -0.861243 0.361465
+0.347886 0.0811156 -0.88304 0.304363
+0.341836 0.103695 -0.901055 0.245958
+0.334322 0.12583 -0.915212 0.186499
+0.325376 0.147426 -0.925451 0.126242
+0.315037 0.168391 -0.931726 0.0654444
+0.30335 0.188635 -0.934011 0.00436652
+0.290363 0.208071 -0.932297 -0.0567301
+0.276133 0.226616 -0.92659 -0.117584
+0.26072 0.244191 -0.916916 -0.177934
+0.244191 0.26072 -0.903316 -0.237522
+0.226616 0.276133 -0.885847 -0.296093
+0.208071 0.290363 -0.864585 -0.353396
+0.188635 0.30335 -0.83962 -0.409186
+0.168391 0.315038 -0.811061 -0.463224
+0.147426 0.325376 -0.779028 -0.515278
+0.12583 0.334322 -0.743659 -0.565126
+0.103695 0.341836 -0.705106 -0.612553
+0.0811155 0.347886 -0.663533 -0.657358
+0.058189 0.352446 -0.619119 -0.699348
+0.0350134 0.355497 -0.572054 -0.738343
+0.0116878 0.357026 -0.522539 -0.774176
+-0.0116879 0.357026 -0.470787 -0.806694
+-0.0350134 0.355497 -0.417019 -0.835758
+-0.0581891 0.352446 -0.361465 -0.861243
+-0.0811156 0.347886 -0.304363 -0.88304
+-0.103695 0.341836 -0.245957 -0.901055
+-0.12583 0.334322 -0.186499 -0.915213
+-0.147426 0.325376 -0.126242 -0.925451
+-0.168391 0.315038 -0.0654445 -0.931726
+-0.188635 0.30335 -0.00436653 -0.934011
+-0.208071 0.290363 0.0567302 -0.932297
+-0.226616 0.276133 0.117584 -0.92659
+-0.244191 0.26072 0.177934 -0.916916
+-0.26072 0.244191 0.237522 -0.903316
+-0.276133 0.226616 0.296093 -0.885847
+-0.290363 0.208071 0.353396 -0.864585
+-0.30335 0.188635 0.409186 -0.83962
+-0.315037 0.168391 0.463224 -0.811061
+-0.325376 0.147426 0.515278 -0.779028
+-0.334322 0.12583 0.565126 -0.743659
+-0.341836 0.103695 0.612553 -0.705106
+-0.347886 0.0811155 0.657358 -0.663533
+-0.352446 0.0581891 0.699348 -0.619119
+-0.355497 0.0350134 0.738343 -0.572054
+-0.357026 0.0116879 0.774176 -0.522539
+0.40803 0.0133575 -0.818729 0.403752
+0.406282 0.0400153 -0.843383 0.349341
+0.402795 0.0665018 -0.864425 0.293433
+0.397584 0.0927035 -0.881766 0.236268
+0.390669 0.118508 -0.89533 0.178092
+0.382082 0.143805 -0.905061 0.119154
+0.371859 0.168487 -0.910916 0.0597046
+0.360043 0.192447 -0.912871 -1.92711e-08
+0.346685 0.215583 -0.910916 -0.0597046
+0.331843 0.237796 -0.905061 -0.119154
+0.31558 0.25899 -0.89533 -0.178092
+0.297966 0.279075 -0.881766 -0.236268
+0.279075 0.297966 -0.864425 -0.293433
+0.25899 0.31558 -0.843383 -0.349341
+0.237796 0.331843 -0.818729 -0.403753
+0.215583 0.346685 -0.790569 -0.456436
+0.192447 0.360043 -0.759024 -0.507164
+0.168487 0.371859 -0.724229 -0.555721
+0.143805 0.382082 -0.686333 -0.601898
+0.118508 0.390669 -0.645497 -0.645497
+0.0927035 0.397584 -0.601898 -0.686333
+0.0665017 0.402795 -0.555721 -0.724229
+0.0400153 0.406282 -0.507164 -0.759024
+0.0133575 0.40803 -0.456435 -0.790569
+-0.0133575 0.40803 -0.403752 -0.818729
+-0.0400154 0.406282 -0.349341 -0.843383
+-0.0665018 0.402795 -0.293433 -0.864425
+-0.0927035 0.397584 -0.236268 -0.881766
+-0.118508 0.390669 -0.178092 -0.89533
+-0.143806 0.382082 -0.119153 -0.905061
+-0.168487 0.371859 -0.0597045 -0.910916
+-0.192447 0.360043 -1.0406e-07 -0.912871
+-0.215583 0.346685 0.0597046 -0.910916
+-0.237796 0.331843 0.119154 -0.905061
+-0.25899 0.31558 0.178092 -0.89533
+-0.279075 0.297966 0.236268 -0.881766
+-0.297966 0.279075 0.293433 -0.864425
+-0.31558 0.25899 0.349341 -0.843383
+-0.331843 0.237796 0.403752 -0.818729
+-0.346685 0.215583 0.456435 -0.790569
+-0.360043 0.192447 0.507164 -0.759024
+-0.371859 0.168487 0.555721 -0.724229
+-0.382082 0.143805 0.601898 -0.686333
+-0.390669 0.118508 0.645497 -0.645497
+-0.397584 0.0927035 0.686333 -0.601898
+-0.402795 0.0665018 0.724229 -0.555721
+-0.406282 0.0400153 0.759024 -0.507164
+-0.40803 0.0133575 0.790569 -0.456436
+0.40803 0.0133575 -0.724229 0.555721
+0.406282 0.0400153 -0.759024 0.507164
+0.402795 0.0665018 -0.790569 0.456435
+0.397584 0.0927035 -0.818729 0.403752
+0.390669 0.118508 -0.843383 0.349341
+0.382082 0.143805 -0.864425 0.293433
+0.371859 0.168487 -0.881766 0.236268
+0.360043 0.192447 -0.89533 0.178092
+0.346685 0.215583 -0.905061 0.119154
+0.331843 0.237796 -0.910916 0.0597046
+0.31558 0.25899 -0.912871 1.65496e-08
+0.297966 0.279075 -0.910916 -0.0597046
+0.279075 0.297966 -0.905061 -0.119154
+0.25899 0.31558 -0.89533 -0.178092
+0.237796 0.331843 -0.881766 -0.236268
+0.215583 0.346685 -0.864425 -0.293433
+0.192447 0.360043 -0.843383 -0.349341
+0.168487 0.371859 -0.818729 -0.403753
+0.143805 0.382082 -0.790569 -0.456436
+0.118508 0.390669 -0.759024 -0.507164
+0.0927035 0.397584 -0.724229 -0.555721
+0.0665017 0.402795 -0.686333 -0.601898
+0.0400153 0.406282 -0.645497 -0.645497
+0.0133575 0.40803 -0.601898 -0.686333
+-0.0133575 0.40803 -0.555721 -0.724229
+-0.0400154 0.406282 -0.507164 -0.759024
+-0.0665018 0.402795 -0.456435 -0.790569
+-0.0927035 0.397584 -0.403752 -0.818729
+-0.118508 0.390669 -0.34934 -0.843383
+-0.143806 0.382082 -0.293433 -0.864425
+-0.168487 0.371859 -0.236268 -0.881766
+-0.192447 0.360043 -0.178092 -0.89533
+-0.215583 0.346685 -0.119154 -0.905061
+-0.237796 0.331843 -0.0597045 -0.910916
+-0.25899 0.31558 -3.10581e-08 -0.912871
+-0.279075 0.297966 0.0597047 -0.910916
+-0.297966 0.279075 0.119154 -0.905061
+-0.31558 0.25899 0.178092 -0.89533
+-0.331843 0.237796 0.236268 -0.881766
+-0.346685 0.215583 0.293433 -0.864425
+-0.360043 0.192447 0.34934 -0.843383
+-0.371859 0.168487 0.403752 -0.818729
+-0.382082 0.143805 0.456436 -0.790569
+-0.390669 0.118508 0.507164 -0.759024
+-0.397584 0.0927035 0.555721 -0.724229
+-0.402795 0.0665018 0.601898 -0.686333
+-0.406282 0.0400153 0.645497 -0.645497
+-0.40803 0.0133575 0.686333 -0.601898
+0.456191 0.0149342 -0.755583 0.469852
+0.454238 0.0447385 -0.784695 0.419428
+0.450339 0.0743513 -0.810447 0.367209
+0.444512 0.103646 -0.832728 0.313417
+0.436782 0.132496 -0.851444 0.258283
+0.427181 0.160779 -0.866513 0.202043
+0.415751 0.188374 -0.877872 0.144937
+0.40254 0.215162 -0.885472 0.0872114
+0.387606 0.241029 -0.88928 0.029112
+0.371012 0.265863 -0.88928 -0.029112
+0.352829 0.28956 -0.885472 -0.0872114
+0.333136 0.312016 -0.877872 -0.144937
+0.312016 0.333136 -0.866513 -0.202043
+0.28956 0.352829 -0.851444 -0.258283
+0.265863 0.371012 -0.832728 -0.313417
+0.241029 0.387606 -0.810447 -0.367209
+0.215162 0.40254 -0.784695 -0.419428
+0.188374 0.415751 -0.755583 -0.469852
+0.160779 0.427181 -0.723236 -0.518263
+0.132496 0.436782 -0.687791 -0.564456
+0.103646 0.444512 -0.649401 -0.608231
+0.0743512 0.450339 -0.608231 -0.649401
+0.0447384 0.454238 -0.564455 -0.687791
+0.0149341 0.456191 -0.518263 -0.723236
+-0.0149342 0.456191 -0.469852 -0.755583
+-0.0447385 0.454238 -0.419428 -0.784695
+-0.0743513 0.450339 -0.367209 -0.810447
+-0.103646 0.444512 -0.313417 -0.832728
+-0.132496 0.436781 -0.258283 -0.851444
+-0.160779 0.427181 -0.202042 -0.866513
+-0.188374 0.415751 -0.144937 -0.877872
+-0.215162 0.40254 -0.0872115 -0.885472
+-0.241029 0.387606 -0.029112 -0.88928
+-0.265864 0.371012 0.0291121 -0.88928
+-0.28956 0.352829 0.0872114 -0.885472
+-0.312016 0.333136 0.144937 -0.877872
+-0.333136 0.312016 0.202043 -0.866513
+-0.352829 0.28956 0.258283 -0.851444
+-0.371012 0.265864 0.313417 -0.832728
+-0.387606 0.241029 0.367209 -0.810447
+-0.40254 0.215162 0.419428 -0.784695
+-0.415751 0.188374 0.469852 -0.755583
+-0.427181 0.160779 0.518263 -0.723236
+-0.436782 0.132496 0.564456 -0.687791
+-0.444512 0.103646 0.608231 -0.649401
+-0.450339 0.0743513 0.649401 -0.608231
+-0.454238 0.0447385 0.687791 -0.564456
+-0.456191 0.0149342 0.723236 -0.518263
+0.456191 0.0149342 -0.832728 0.313417
+0.454238 0.0447385 -0.851444 0.258283
+0.450339 0.0743513 -0.866513 0.202043
+0.444512 0.103646 -0.877872 0.144937
+0.436782 0.132496 -0.885472 0.0872114
+0.427181 0.160779 -0.88928 0.029112
+0.415751 0.188374 -0.88928 -0.029112
+0.40254 0.215162 -0.885472 -0.0872114
+0.387606 0.241029 -0.877872 -0.144937
+0.371012 0.265863 -0.866513 -0.202043
+0.352829 0.28956 -0.851444 -0.258283
+0.333136 0.312016 -0.832728 -0.313417
+0.312016 0.333136 -0.810447 -0.367209
+0.28956 0.352829 -0.784695 -0.419428
+0.265863 0.371012 -0.755583 -0.469852
+0.241029 0.387606 -0.723236 -0.518263
+0.215162 0.40254 -0.687791 -0.564456
+0.188374 0.415751 -0.649401 -0.608231
+0.160779 0.427181 -0.608231 -0.649401
+0.132496 0.436782 -0.564456 -0.687791
+0.103646 0.444512 -0.518263 -0.723236
+0.0743512 0.450339 -0.469852 -0.755583
+0.0447384 0.454238 -0.419428 -0.784695
+0.0149341 0.456191 -0.367209 -0.810447
+-0.0149342 0.456191 -0.313417 -0.832728
+-0.0447385 0.454238 -0.258283 -0.851444
+-0.0743513 0.450339 -0.202043 -0.866513
+-0.103646 0.444512 -0.144937 -0.877872
+-0.132496 0.436781 -0.0872112 -0.885472
+-0.160779 0.427181 -0.0291119 -0.88928
+-0.188374 0.415751 0.0291121 -0.88928
+-0.215162 0.40254 0.0872113 -0.885472
+-0.241029 0.387606 0.144937 -0.877872
+-0.265864 0.371012 0.202043 -0.866513
+-0.28956 0.352829 0.258283 -0.851444
+-0.312016 0.333136 0.313417 -0.832728
+-0.333136 0.312016 0.367209 -0.810447
+-0.352829 0.28956 0.419428 -0.784695
+-0.371012 0.265864 0.469852 -0.755583
+-0.387606 0.241029 0.518263 -0.723236
+-0.40254 0.215162 0.564455 -0.687791
+-0.415751 0.188374 0.608231 -0.649401
+-0.427181 0.160779 0.649401 -0.608231
+-0.436782 0.132496 0.687791 -0.564456
+-0.444512 0.103646 0.723236 -0.518263
+-0.450339 0.0743513 0.755583 -0.469852
+-0.454238 0.0447385 0.784695 -0.419428
+-0.456191 0.0149342 0.810447 -0.367209
+0.499732 0.0163595 -0.836516 0.224144
+0.497592 0.0490086 -0.849385 0.168953
+0.493322 0.0814477 -0.858616 0.113039
+0.486938 0.113538 -0.864171 0.0566408
+0.47847 0.145142 -0.866025 -6.45453e-10
+0.467953 0.176125 -0.864171 -0.0566408
+0.455432 0.206354 -0.858616 -0.113039
+0.440961 0.235698 -0.849385 -0.168953
+0.424601 0.264034 -0.836516 -0.224144
+0.406423 0.291239 -0.820066 -0.278375
+0.386505 0.317197 -0.800103 -0.331414
+0.364932 0.341796 -0.776715 -0.383033
+0.341796 0.364932 -0.75 -0.433013
+0.317197 0.386505 -0.720074 -0.481138
+0.291239 0.406423 -0.687064 -0.527203
+0.264034 0.424601 -0.651112 -0.57101
+0.235698 0.440961 -0.612372 -0.612372
+0.206353 0.455432 -0.57101 -0.651112
+0.176125 0.467953 -0.527203 -0.687064
+0.145142 0.47847 -0.481138 -0.720074
+0.113538 0.486938 -0.433013 -0.75
+0.0814477 0.493322 -0.383033 -0.776715
+0.0490085 0.497592 -0.331413 -0.800103
+0.0163595 0.499732 -0.278375 -0.820066
+-0.0163596 0.499732 -0.224144 -0.836516
+-0.0490086 0.497592 -0.168953 -0.849385
+-0.0814478 0.493322 -0.113039 -0.858616
+-0.113538 0.486938 -0.0566407 -0.864171
+-0.145142 0.47847 1.41739e-07 -0.866025
+-0.176125 0.467953 0.0566409 -0.864171
+-0.206354 0.455432 0.113039 -0.858616
+-0.235698 0.440961 0.168953 -0.849385
+-0.264034 0.424601 0.224144 -0.836516
+-0.291239 0.406423 0.278375 -0.820066
+-0.317197 0.386505 0.331414 -0.800103
+-0.341796 0.364932 0.383033 -0.776715
+-0.364932 0.341796 0.433013 -0.75
+-0.386505 0.317197 0.481138 -0.720074
+-0.406423 0.291239 0.527203 -0.687064
+-0.424601 0.264034 0.57101 -0.651112
+-0.440961 0.235698 0.612372 -0.612373
+-0.455432 0.206354 0.651112 -0.57101
+-0.467953 0.176125 0.687064 -0.527203
+-0.47847 0.145142 0.720074 -0.481138
+-0.486938 0.113538 0.75 -0.433013
+-0.493322 0.0814478 0.776715 -0.383033
+-0.497592 0.0490085 0.800103 -0.331414
+-0.499732 0.0163596 0.820066 -0.278375
+0.499732 0.0163595 -0.776715 0.383033
+0.497592 0.0490086 -0.800103 0.331414
+0.493322 0.0814477 -0.820066 0.278375
+0.486938 0.113538 -0.836516 0.224144
+0.47847 0.145142 -0.849385 0.168953
+0.467953 0.176125 -0.858616 0.113039
+0.455432 0.206354 -0.864171 0.0566408
+0.440961 0.235698 -0.866025 -1.82821e-08
+0.424601 0.264034 -0.864171 -0.0566408
+0.406423 0.291239 -0.858616 -0.113039
+0.386505 0.317197 -0.849385 -0.168953
+0.364932 0.341796 -0.836516 -0.224144
+0.341796 0.364932 -0.820066 -0.278375
+0.317197 0.386505 -0.800103 -0.331414
+0.291239 0.406423 -0.776715 -0.383033
+0.264034 0.424601 -0.75 -0.433013
+0.235698 0.440961 -0.720074 -0.481138
+0.206353 0.455432 -0.687064 -0.527203
+0.176125 0.467953 -0.651112 -0.57101
+0.145142 0.47847 -0.612372 -0.612372
+0.113538 0.486938 -0.57101 -0.651112
+0.0814477 0.493322 -0.527203 -0.687064
+0.0490085 0.497592 -0.481138 -0.720074
+0.0163595 0.499732 -0.433013 -0.75
+-0.0163596 0.499732 -0.383033 -0.776715
+-0.0490086 0.497592 -0.331414 -0.800103
+-0.0814478 0.493322 -0.278375 -0.820066
+-0.113538 0.486938 -0.224144 -0.836516
+-0.145142 0.47847 -0.168953 -0.849385
+-0.176125 0.467953 -0.113039 -0.858616
+-0.206354 0.455432 -0.0566407 -0.864171
+-0.235698 0.440961 -9.87201e-08 -0.866025
+-0.264034 0.424601 0.0566408 -0.864171
+-0.291239 0.406423 0.113039 -0.858616
+-0.317197 0.386505 0.168953 -0.849385
+-0.341796 0.364932 0.224144 -0.836516
+-0.364932 0.341796 0.278375 -0.820066
+-0.386505 0.317197 0.331414 -0.800103
+-0.406423 0.291239 0.383033 -0.776715
+-0.424601 0.264034 0.433013 -0.75
+-0.440961 0.235698 0.481138 -0.720074
+-0.455432 0.206354 0.527203 -0.687064
+-0.467953 0.176125 0.57101 -0.651112
+-0.47847 0.145142 0.612372 -0.612372
+-0.486938 0.113538 0.651112 -0.57101
+-0.493322 0.0814478 0.687064 -0.527203
+-0.497592 0.0490085 0.720074 -0.481138
+-0.499732 0.0163596 0.75 -0.433013
+0.539773 0.0176703 -0.787682 0.296463
+0.537461 0.0529353 -0.805385 0.244311
+0.532848 0.0879736 -0.81964 0.191113
+0.525954 0.122635 -0.830384 0.137097
+0.516807 0.156772 -0.837573 0.0824937
+0.505447 0.190237 -0.841175 0.0275372
+0.491923 0.222887 -0.841175 -0.0275372
+0.476292 0.254583 -0.837573 -0.0824937
+0.458622 0.285189 -0.830384 -0.137097
+0.438987 0.314574 -0.81964 -0.191113
+0.417473 0.342612 -0.805385 -0.244311
+0.394172 0.369182 -0.787682 -0.296463
+0.369182 0.394172 -0.766606 -0.347345
+0.342612 0.417473 -0.742247 -0.396739
+0.314574 0.438987 -0.71471 -0.444435
+0.285189 0.458622 -0.684112 -0.490228
+0.254583 0.476292 -0.650585 -0.533922
+0.222887 0.491923 -0.614272 -0.575329
+0.190237 0.505447 -0.575329 -0.614272
+0.156772 0.516807 -0.533921 -0.650585
+0.122635 0.525954 -0.490228 -0.684112
+0.0879735 0.532848 -0.444435 -0.71471
+0.0529352 0.537461 -0.396739 -0.742247
+0.0176703 0.539773 -0.347345 -0.766606
+-0.0176704 0.539773 -0.296463 -0.787682
+-0.0529354 0.537461 -0.244311 -0.805385
+-0.0879736 0.532848 -0.191113 -0.81964
+-0.122635 0.525954 -0.137097 -0.830384
+-0.156772 0.516807 -0.0824936 -0.837573
+-0.190237 0.505447 -0.0275371 -0.841175
+-0.222887 0.491923 0.0275373 -0.841175
+-0.254583 0.476292 0.0824936 -0.837573
+-0.285189 0.458622 0.137097 -0.830384
+-0.314574 0.438987 0.191113 -0.81964
+-0.342612 0.417473 0.244311 -0.805385
+-0.369182 0.394172 0.296463 -0.787682
+-0.394172 0.369182 0.347345 -0.766606
+-0.417473 0.342612 0.39674 -0.742247
+-0.438987 0.314574 0.444435 -0.71471
+-0.458622 0.285189 0.490228 -0.684112
+-0.476292 0.254583 0.533921 -0.650585
+-0.491923 0.222887 0.575329 -0.614272
+-0.505447 0.190237 0.614272 -0.575329
+-0.516807 0.156772 0.650585 -0.533922
+-0.525954 0.122635 0.684112 -0.490228
+-0.532848 0.0879736 0.71471 -0.444435
+-0.537461 0.0529353 0.742247 -0.396739
+-0.539773 0.0176704 0.766606 -0.347345
+0.255019 0.00834844 -0.182422 0.949536
+0.253927 0.0250096 -0.244134 0.935572
+0.251747 0.0415636 -0.304801 0.917601
+0.24849 0.0579397 -0.364162 0.895702
+0.244168 0.0740676 -0.421964 0.869967
+0.238801 0.0898784 -0.477959 0.840506
+0.232412 0.105304 -0.531908 0.807447
+0.225027 0.120279 -0.583578 0.770929
+0.216678 0.134739 -0.63275 0.731111
+0.207402 0.148622 -0.679212 0.688162
+0.197238 0.161869 -0.722766 0.642266
+0.186229 0.174422 -0.763225 0.593619
+0.174422 0.186229 -0.800415 0.542431
+0.161869 0.197238 -0.834178 0.48892
+0.148622 0.207402 -0.864369 0.433315
+0.134739 0.216678 -0.890858 0.375855
+0.120279 0.225027 -0.913533 0.316786
+0.105304 0.232412 -0.932296 0.256359
+0.0898784 0.238801 -0.947067 0.194835
+0.0740676 0.244168 -0.957782 0.132477
+0.0579397 0.24849 -0.964395 0.0695517
+0.0415636 0.251747 -0.966879 0.00632817
+0.0250096 0.253927 -0.965223 -0.0569223
+0.00834842 0.255019 -0.959434 -0.119929
+-0.00834847 0.255019 -0.949536 -0.182422
+-0.0250096 0.253927 -0.935572 -0.244134
+-0.0415636 0.251747 -0.917601 -0.304801
+-0.0579397 0.24849 -0.895702 -0.364162
+-0.0740677 0.244168 -0.869967 -0.421964
+-0.0898785 0.238801 -0.840506 -0.477959
+-0.105304 0.232412 -0.807447 -0.531908
+-0.120279 0.225027 -0.770929 -0.583578
+-0.134739 0.216678 -0.731111 -0.63275
+-0.148622 0.207402 -0.688162 -0.679212
+-0.161869 0.197238 -0.642266 -0.722766
+-0.174422 0.186229 -0.593619 -0.763225
+-0.186229 0.174422 -0.542431 -0.800415
+-0.197238 0.161869 -0.48892 -0.834178
+-0.207402 0.148622 -0.433316 -0.864369
+-0.216678 0.134739 -0.375855 -0.890858
+-0.225027 0.120279 -0.316786 -0.913533
+-0.232412 0.105304 -0.256359 -0.932296
+-0.238801 0.0898784 -0.194835 -0.947067
+-0.244168 0.0740676 -0.132477 -0.957782
+-0.24849 0.0579397 -0.0695516 -0.964395
+-0.251747 0.0415636 -0.00632836 -0.966879
+-0.253927 0.0250096 0.0569222 -0.965223
+-0.255019 0.00834847 0.119929 -0.959434
+0.306022 0.0100181 -0.392885 0.867117
+0.304712 0.0300115 -0.448756 0.839564
+0.302097 0.0498763 -0.502706 0.808416
+0.298188 0.0695276 -0.554502 0.773807
+0.293002 0.0888812 -0.603924 0.735884
+0.286561 0.107854 -0.65076 0.69481
+0.278894 0.126365 -0.69481 0.65076
+0.270032 0.144335 -0.735884 0.603924
+0.260014 0.161687 -0.773807 0.554502
+0.248882 0.178347 -0.808416 0.502706
+0.236685 0.194242 -0.839564 0.448756
+0.223474 0.209307 -0.867117 0.392885
+0.209307 0.223474 -0.890956 0.335332
+0.194242 0.236685 -0.91098 0.276343
+0.178347 0.248882 -0.927103 0.21617
+0.161687 0.260014 -0.939256 0.155072
+0.144335 0.270032 -0.947388 0.0933095
+0.126365 0.278894 -0.951462 0.0311476
+0.107854 0.286562 -0.951462 -0.0311477
+0.0888812 0.293002 -0.947388 -0.0933096
+0.0695276 0.298188 -0.939256 -0.155072
+0.0498763 0.302097 -0.927103 -0.21617
+0.0300115 0.304712 -0.91098 -0.276343
+0.0100181 0.306022 -0.890956 -0.335332
+-0.0100182 0.306022 -0.867116 -0.392885
+-0.0300115 0.304712 -0.839564 -0.448756
+-0.0498764 0.302097 -0.808416 -0.502706
+-0.0695276 0.298188 -0.773807 -0.554502
+-0.0888812 0.293002 -0.735884 -0.603925
+-0.107854 0.286561 -0.69481 -0.650761
+-0.126365 0.278894 -0.65076 -0.69481
+-0.144335 0.270032 -0.603924 -0.735884
+-0.161687 0.260014 -0.554502 -0.773807
+-0.178347 0.248882 -0.502706 -0.808416
+-0.194242 0.236685 -0.448756 -0.839564
+-0.209307 0.223474 -0.392885 -0.867117
+-0.223474 0.209307 -0.335332 -0.890956
+-0.236685 0.194242 -0.276343 -0.91098
+-0.248882 0.178347 -0.21617 -0.927103
+-0.260014 0.161687 -0.155072 -0.939256
+-0.270032 0.144335 -0.0933096 -0.947388
+-0.278894 0.126365 -0.0311476 -0.951462
+-0.286562 0.107854 0.0311477 -0.951462
+-0.293002 0.0888812 0.0933095 -0.947388
+-0.298188 0.0695276 0.155072 -0.939256
+-0.302097 0.0498764 0.21617 -0.927103
+-0.304712 0.0300115 0.276343 -0.91098
+-0.306022 0.0100182 0.335332 -0.890956
+0.306022 0.0100181 -0.155072 0.939256
+0.304712 0.0300115 -0.21617 0.927103
+0.302097 0.0498763 -0.276343 0.91098
+0.298188 0.0695276 -0.335332 0.890956
+0.293002 0.0888812 -0.392885 0.867117
+0.286561 0.107854 -0.448756 0.839564
+0.278894 0.126365 -0.502706 0.808416
+0.270032 0.144335 -0.554502 0.773807
+0.260014 0.161687 -0.603924 0.735884
+0.248882 0.178347 -0.65076 0.69481
+0.236685 0.194242 -0.69481 0.65076
+0.223474 0.209307 -0.735884 0.603924
+0.209307 0.223474 -0.773807 0.554502
+0.194242 0.236685 -0.808416 0.502706
+0.178347 0.248882 -0.839564 0.448756
+0.161687 0.260014 -0.867117 0.392885
+0.144335 0.270032 -0.890956 0.335332
+0.126365 0.278894 -0.91098 0.276343
+0.107854 0.286562 -0.927103 0.21617
+0.0888812 0.293002 -0.939256 0.155072
+0.0695276 0.298188 -0.947388 0.0933095
+0.0498763 0.302097 -0.951462 0.0311475
+0.0300115 0.304712 -0.951462 -0.0311478
+0.0100181 0.306022 -0.947388 -0.0933096
+-0.0100182 0.306022 -0.939256 -0.155072
+-0.0300115 0.304712 -0.927103 -0.21617
+-0.0498764 0.302097 -0.91098 -0.276343
+-0.0695276 0.298188 -0.890956 -0.335332
+-0.0888812 0.293002 -0.867116 -0.392886
+-0.107854 0.286561 -0.839564 -0.448756
+-0.126365 0.278894 -0.808416 -0.502706
+-0.144335 0.270032 -0.773807 -0.554502
+-0.161687 0.260014 -0.735884 -0.603924
+-0.178347 0.248882 -0.69481 -0.650761
+-0.194242 0.236685 -0.650761 -0.69481
+-0.209307 0.223474 -0.603924 -0.735884
+-0.223474 0.209307 -0.554502 -0.773807
+-0.236685 0.194242 -0.502706 -0.808416
+-0.248882 0.178347 -0.448756 -0.839564
+-0.260014 0.161687 -0.392885 -0.867117
+-0.270032 0.144335 -0.335332 -0.890956
+-0.278894 0.126365 -0.276343 -0.91098
+-0.286562 0.107854 -0.21617 -0.927103
+-0.293002 0.0888812 -0.155072 -0.939256
+-0.298188 0.0695276 -0.0933095 -0.947388
+-0.302097 0.0498764 -0.0311477 -0.951462
+-0.304712 0.0300115 0.0311477 -0.951462
+-0.306022 0.0100182 0.0933095 -0.947388
+0.357026 0.0116878 -0.337168 0.871042
+0.355497 0.0350134 -0.393415 0.847125
+0.352446 0.0581891 -0.447977 0.819581
+0.347886 0.0811156 -0.500621 0.788527
+0.341836 0.103695 -0.551121 0.754096
+0.334322 0.12583 -0.599262 0.716437
+0.325376 0.147426 -0.644836 0.675709
+0.315037 0.168391 -0.687649 0.632088
+0.30335 0.188635 -0.727517 0.58576
+0.290363 0.208071 -0.76427 0.536924
+0.276133 0.226616 -0.79775 0.485789
+0.26072 0.244191 -0.827814 0.432574
+0.244191 0.26072 -0.854333 0.377506
+0.226616 0.276133 -0.877194 0.320821
+0.208071 0.290363 -0.896299 0.262763
+0.188635 0.30335 -0.911565 0.20358
+0.168391 0.315038 -0.922928 0.143525
+0.147426 0.325376 -0.930339 0.0828551
+0.12583 0.334322 -0.933766 0.0218307
+0.103695 0.341836 -0.933195 -0.0392873
+0.0811155 0.347886 -0.928627 -0.100237
+0.058189 0.352446 -0.920083 -0.160758
+0.0350134 0.355497 -0.907599 -0.22059
+0.0116878 0.357026 -0.891229 -0.279477
+-0.0116879 0.357026 -0.871042 -0.337168
+-0.0350134 0.355497 -0.847125 -0.393415
+-0.0581891 0.352446 -0.819581 -0.447977
+-0.0811156 0.347886 -0.788527 -0.500621
+-0.103695 0.341836 -0.754096 -0.551121
+-0.12583 0.334322 -0.716436 -0.599262
+-0.147426 0.325376 -0.675709 -0.644836
+-0.168391 0.315038 -0.632088 -0.687649
+-0.188635 0.30335 -0.58576 -0.727517
+-0.208071 0.290363 -0.536924 -0.76427
+-0.226616 0.276133 -0.485789 -0.79775
+-0.244191 0.26072 -0.432574 -0.827814
+-0.26072 0.244191 -0.377506 -0.854333
+-0.276133 0.226616 -0.320821 -0.877194
+-0.290363 0.208071 -0.262763 -0.896299
+-0.30335 0.188635 -0.20358 -0.911565
+-0.315037 0.168391 -0.143525 -0.922928
+-0.325376 0.147426 -0.0828552 -0.930339
+-0.334322 0.12583 -0.0218306 -0.933766
+-0.341836 0.103695 0.0392872 -0.933195
+-0.347886 0.0811155 0.100237 -0.928627
+-0.352446 0.0581891 0.160757 -0.920083
+-0.355497 0.0350134 0.22059 -0.907599
+-0.357026 0.0116879 0.279477 -0.891229
+0.357026 0.0116878 -0.522539 0.774176
+0.355497 0.0350134 -0.572054 0.738343
+0.352446 0.0581891 -0.619119 0.699348
+0.347886 0.0811156 -0.663533 0.657358
+0.341836 0.103695 -0.705106 0.612553
+0.334322 0.12583 -0.743659 0.565126
+0.325376 0.147426 -0.779028 0.515278
+0.315037 0.168391 -0.811061 0.463224
+0.30335 0.188635 -0.83962 0.409186
+0.290363 0.208071 -0.864585 0.353396
+0.276133 0.226616 -0.885847 0.296093
+0.26072 0.244191 -0.903316 0.237522
+0.244191 0.26072 -0.916916 0.177934
+0.226616 0.276133 -0.92659 0.117584
+0.208071 0.290363 -0.932297 0.05673
+0.188635 0.30335 -0.934011 -0.00436661
+0.168391 0.315038 -0.931726 -0.0654445
+0.147426 0.325376 -0.925451 -0.126242
+0.12583 0.334322 -0.915212 -0.186499
+0.103695 0.341836 -0.901055 -0.245958
+0.0811155 0.347886 -0.88304 -0.304363
+0.058189 0.352446 -0.861243 -0.361465
+0.0350134 0.355497 -0.835758 -0.417019
+0.0116878 0.357026 -0.806694 -0.470787
+-0.0116879 0.357026 -0.774176 -0.522539
+-0.0350134 0.355497 -0.738343 -0.572054
+-0.0581891 0.352446 -0.699348 -0.619119
+-0.0811156 0.347886 -0.657358 -0.663533
+-0.103695 0.341836 -0.612553 -0.705106
+-0.12583 0.334322 -0.565126 -0.743659
+-0.147426 0.325376 -0.515278 -0.779028
+-0.168391 0.315038 -0.463224 -0.811061
+-0.188635 0.30335 -0.409186 -0.83962
+-0.208071 0.290363 -0.353396 -0.864585
+-0.226616 0.276133 -0.296093 -0.885847
+-0.244191 0.26072 -0.237522 -0.903316
+-0.26072 0.244191 -0.177934 -0.916916
+-0.276133 0.226616 -0.117584 -0.92659
+-0.290363 0.208071 -0.0567302 -0.932297
+-0.30335 0.188635 0.00436654 -0.934011
+-0.315037 0.168391 0.0654443 -0.931726
+-0.325376 0.147426 0.126242 -0.925451
+-0.334322 0.12583 0.186499 -0.915212
+-0.341836 0.103695 0.245958 -0.901055
+-0.347886 0.0811155 0.304363 -0.88304
+-0.352446 0.0581891 0.361465 -0.861243
+-0.355497 0.0350134 0.417019 -0.835758
+-0.357026 0.0116879 0.470787 -0.806694
+0.40803 0.0133575 -0.601898 0.686333
+0.406282 0.0400153 -0.645497 0.645497
+0.402795 0.0665018 -0.686333 0.601898
+0.397584 0.0927035 -0.724229 0.555721
+0.390669 0.118508 -0.759024 0.507164
+0.382082 0.143805 -0.790569 0.456435
+0.371859 0.168487 -0.818729 0.403752
+0.360043 0.192447 -0.843383 0.349341
+0.346685 0.215583 -0.864425 0.293433
+0.331843 0.237796 -0.881766 0.236268
+0.31558 0.25899 -0.89533 0.178092
+0.297966 0.279075 -0.905061 0.119154
+0.279075 0.297966 -0.910916 0.0597046
+0.25899 0.31558 -0.912871 -2.0411e-09
+0.237796 0.331843 -0.910916 -0.0597047
+0.215583 0.346685 -0.905061 -0.119154
+0.192447 0.360043 -0.89533 -0.178092
+0.168487 0.371859 -0.881766 -0.236268
+0.143805 0.382082 -0.864425 -0.293433
+0.118508 0.390669 -0.843383 -0.349341
+0.0927035 0.397584 -0.818729 -0.403753
+0.0665017 0.402795 -0.790569 -0.456436
+0.0400153 0.406282 -0.759024 -0.507164
+0.0133575 0.40803 -0.724229 -0.555721
+-0.0133575 0.40803 -0.686333 -0.601898
+-0.0400154 0.406282 -0.645497 -0.645497
+-0.0665018 0.402795 -0.601898 -0.686333
+-0.0927035 0.397584 -0.555721 -0.724229
+-0.118508 0.390669 -0.507164 -0.759025
+-0.143806 0.382082 -0.456435 -0.790569
+-0.168487 0.371859 -0.403752 -0.818729
+-0.192447 0.360043 -0.349341 -0.843383
+-0.215583 0.346685 -0.293433 -0.864425
+-0.237796 0.331843 -0.236268 -0.881766
+-0.25899 0.31558 -0.178092 -0.89533
+-0.279075 0.297966 -0.119154 -0.905061
+-0.297966 0.279075 -0.0597047 -0.910916
+-0.31558 0.25899 4.1944e-08 -0.912871
+-0.331843 0.237796 0.0597045 -0.910916
+-0.346685 0.215583 0.119154 -0.905061
+-0.360043 0.192447 0.178092 -0.89533
+-0.371859 0.168487 0.236268 -0.881766
+-0.382082 0.143805 0.293433 -0.864425
+-0.390669 0.118508 0.349341 -0.843383
+-0.397584 0.0927035 0.403753 -0.818729
+-0.402795 0.0665018 0.456435 -0.790569
+-0.406282 0.0400153 0.507164 -0.759024
+-0.40803 0.0133575 0.555721 -0.724229
+0.40803 0.0133575 -0.456435 0.790569
+0.406282 0.0400153 -0.507164 0.759024
+0.402795 0.0665018 -0.555721 0.724229
+0.397584 0.0927035 -0.601898 0.686333
+0.390669 0.118508 -0.645497 0.645497
+0.382082 0.143805 -0.686333 0.601898
+0.371859 0.168487 -0.724229 0.555721
+0.360043 0.192447 -0.759024 0.507164
+0.346685 0.215583 -0.790569 0.456435
+0.331843 0.237796 -0.818729 0.403752
+0.31558 0.25899 -0.843383 0.349341
+0.297966 0.279075 -0.864425 0.293433
+0.279075 0.297966 -0.881766 0.236268
+0.25899 0.31558 -0.89533 0.178092
+0.237796 0.331843 -0.905061 0.119154
+0.215583 0.346685 -0.910916 0.0597045
+0.192447 0.360043 -0.912871 -7.50431e-08
+0.168487 0.371859 -0.910916 -0.0597047
+0.143805 0.382082 -0.905061 -0.119154
+0.118508 0.390669 -0.89533 -0.178092
+0.0927035 0.397584 -0.881766 -0.236268
+0.0665017 0.402795 -0.864425 -0.293433
+0.0400153 0.406282 -0.843383 -0.349341
+0.0133575 0.40803 -0.818729 -0.403753
+-0.0133575 0.40803 -0.790569 -0.456436
+-0.0400154 0.406282 -0.759024 -0.507164
+-0.0665018 0.402795 -0.724229 -0.555721
+-0.0927035 0.397584 -0.686333 -0.601898
+-0.118508 0.390669 -0.645497 -0.645497
+-0.143806 0.382082 -0.601898 -0.686333
+-0.168487 0.371859 -0.555721 -0.724229
+-0.192447 0.360043 -0.507164 -0.759024
+-0.215583 0.346685 -0.456435 -0.790569
+-0.237796 0.331843 -0.403752 -0.818729
+-0.25899 0.31558 -0.349341 -0.843383
+-0.279075 0.297966 -0.293433 -0.864425
+-0.297966 0.279075 -0.236268 -0.881766
+-0.31558 0.25899 -0.178092 -0.89533
+-0.331843 0.237796 -0.119154 -0.905061
+-0.346685 0.215583 -0.0597046 -0.910916
+-0.360043 0.192447 -1.02699e-07 -0.912871
+-0.371859 0.168487 0.0597046 -0.910916
+-0.382082 0.143805 0.119154 -0.905061
+-0.390669 0.118508 0.178092 -0.89533
+-0.397584 0.0927035 0.236268 -0.881766
+-0.402795 0.0665018 0.293433 -0.864425
+-0.406282 0.0400153 0.349341 -0.843383
+-0.40803 0.0133575 0.403752 -0.818729
+0.456191 0.0149342 -0.518263 0.723236
+0.454238 0.0447385 -0.564456 0.687791
+0.450339 0.0743513 -0.608231 0.649401
+0.444512 0.103646 -0.649401 0.608231
+0.436782 0.132496 -0.687791 0.564456
+0.427181 0.160779 -0.723236 0.518263
+0.415751 0.188374 -0.755583 0.469852
+0.40254 0.215162 -0.784695 0.419428
+0.387606 0.241029 -0.810447 0.367209
+0.371012 0.265863 -0.832728 0.313417
+0.352829 0.28956 -0.851444 0.258283
+0.333136 0.312016 -0.866513 0.202043
+0.312016 0.333136 -0.877872 0.144937
+0.28956 0.352829 -0.885472 0.0872114
+0.265863 0.371012 -0.88928 0.029112
+0.241029 0.387606 -0.88928 -0.0291121
+0.215162 0.40254 -0.885472 -0.0872115
+0.188374 0.415751 -0.877872 -0.144937
+0.160779 0.427181 -0.866513 -0.202043
+0.132496 0.436782 -0.851444 -0.258283
+0.103646 0.444512 -0.832728 -0.313417
+0.0743512 0.450339 -0.810447 -0.367209
+0.0447384 0.454238 -0.784695 -0.419428
+0.0149341 0.456191 -0.755583 -0.469852
+-0.0149342 0.456191 -0.723236 -0.518263
+-0.0447385 0.454238 -0.687791 -0.564456
+-0.0743513 0.450339 -0.649401 -0.608231
+-0.103646 0.444512 -0.608231 -0.649401
+-0.132496 0.436781 -0.564455 -0.687791
+-0.160779 0.427181 -0.518263 -0.723236
+-0.188374 0.415751 -0.469852 -0.755583
+-0.215162 0.40254 -0.419428 -0.784695
+-0.241029 0.387606 -0.367209 -0.810447
+-0.265864 0.371012 -0.313417 -0.832728
+-0.28956 0.352829 -0.258283 -0.851444
+-0.312016 0.333136 -0.202043 -0.866513
+-0.333136 0.312016 -0.144937 -0.877872
+-0.352829 0.28956 -0.0872113 -0.885472
+-0.371012 0.265864 -0.0291121 -0.88928
+-0.387606 0.241029 0.029112 -0.88928
+-0.40254 0.215162 0.0872113 -0.885472
+-0.415751 0.188374 0.144937 -0.877872
+-0.427181 0.160779 0.202043 -0.866513
+-0.436782 0.132496 0.258283 -0.851444
+-0.444512 0.103646 0.313417 -0.832728
+-0.450339 0.0743513 0.367209 -0.810447
+-0.454238 0.0447385 0.419428 -0.784695
+-0.456191 0.0149342 0.469852 -0.755583
+0.357026 0.0116878 -0.134889 0.92423
+0.355497 0.0350134 -0.195048 0.913429
+0.352446 0.0581891 -0.254372 0.898716
+0.347886 0.0811156 -0.312606 0.880155
+0.341836 0.103695 -0.369501 0.857825
+0.334322 0.12583 -0.424815 0.831822
+0.325376 0.147426 -0.478309 0.802257
+0.315037 0.168391 -0.529755 0.769256
+0.30335 0.188635 -0.578933 0.732962
+0.290363 0.208071 -0.625631 0.693528
+0.276133 0.226616 -0.66965 0.651125
+0.26072 0.244191 -0.710802 0.605934
+0.244191 0.26072 -0.74891 0.558148
+0.226616 0.276133 -0.783811 0.507972
+0.208071 0.290363 -0.815356 0.45562
+0.188635 0.30335 -0.84341 0.401318
+0.168391 0.315038 -0.867851 0.345297
+0.147426 0.325376 -0.888577 0.287798
+0.12583 0.334322 -0.905497 0.229066
+0.103695 0.341836 -0.91854 0.169353
+0.0811155 0.347886 -0.927649 0.108915
+0.058189 0.352446 -0.932787 0.0480106
+0.0350134 0.355497 -0.933929 -0.0130993
+0.0116878 0.357026 -0.931073 -0.0741532
+-0.0116879 0.357026 -0.92423 -0.134889
+-0.0350134 0.355497 -0.913429 -0.195048
+-0.0581891 0.352446 -0.898716 -0.254372
+-0.0811156 0.347886 -0.880155 -0.312606
+-0.103695 0.341836 -0.857825 -0.369502
+-0.12583 0.334322 -0.831822 -0.424815
+-0.147426 0.325376 -0.802257 -0.478309
+-0.168391 0.315038 -0.769257 -0.529755
+-0.188635 0.30335 -0.732962 -0.578933
+-0.208071 0.290363 -0.693528 -0.625631
+-0.226616 0.276133 -0.651125 -0.66965
+-0.244191 0.26072 -0.605934 -0.710802
+-0.26072 0.244191 -0.558148 -0.74891
+-0.276133 0.226616 -0.507972 -0.783812
+-0.290363 0.208071 -0.455621 -0.815356
+-0.30335 0.188635 -0.401318 -0.84341
+-0.315037 0.168391 -0.345297 -0.867851
+-0.325376 0.147426 -0.287798 -0.888577
+-0.334322 0.12583 -0.229066 -0.905497
+-0.341836 0.103695 -0.169353 -0.91854
+-0.347886 0.0811155 -0.108915 -0.927649
+-0.352446 0.0581891 -0.0480108 -0.932787
+-0.355497 0.0350134 0.0130992 -0.933929
+-0.357026 0.0116879 0.074153 -0.931073
+0.40803 0.0133575 -0.293433 0.864425
+0.406282 0.0400153 -0.349341 0.843383
+0.402795 0.0665018 -0.403752 0.818729
+0.397584 0.0927035 -0.456435 0.790569
+0.390669 0.118508 -0.507164 0.759024
+0.382082 0.143805 -0.555721 0.724229
+0.371859 0.168487 -0.601898 0.686333
+0.360043 0.192447 -0.645497 0.645497
+0.346685 0.215583 -0.686333 0.601898
+0.331843 0.237796 -0.724229 0.555721
+0.31558 0.25899 -0.759024 0.507164
+0.297966 0.279075 -0.790569 0.456435
+0.279075 0.297966 -0.818729 0.403752
+0.25899 0.31558 -0.843383 0.349341
+0.237796 0.331843 -0.864425 0.293433
+0.215583 0.346685 -0.881766 0.236268
+0.192447 0.360043 -0.89533 0.178092
+0.168487 0.371859 -0.905061 0.119154
+0.143805 0.382082 -0.910916 0.0597046
+0.118508 0.390669 -0.912871 -3.92225e-08
+0.0927035 0.397584 -0.910916 -0.0597046
+0.0665017 0.402795 -0.905061 -0.119154
+0.0400153 0.406282 -0.89533 -0.178092
+0.0133575 0.40803 -0.881766 -0.236268
+-0.0133575 0.40803 -0.864425 -0.293433
+-0.0400154 0.406282 -0.843383 -0.349341
+-0.0665018 0.402795 -0.818729 -0.403753
+-0.0927035 0.397584 -0.790569 -0.456436
+-0.118508 0.390669 -0.759024 -0.507164
+-0.143806 0.382082 -0.724229 -0.555721
+-0.168487 0.371859 -0.686333 -0.601898
+-0.192447 0.360043 -0.645497 -0.645497
+-0.215583 0.346685 -0.601898 -0.686333
+-0.237796 0.331843 -0.555721 -0.724229
+-0.25899 0.31558 -0.507164 -0.759024
+-0.279075 0.297966 -0.456435 -0.790569
+-0.297966 0.279075 -0.403753 -0.818729
+-0.31558 0.25899 -0.349341 -0.843383
+-0.331843 0.237796 -0.293433 -0.864425
+-0.346685 0.215583 -0.236268 -0.881766
+-0.360043 0.192447 -0.178092 -0.89533
+-0.371859 0.168487 -0.119154 -0.905061
+-0.382082 0.143805 -0.0597045 -0.910916
+-0.390669 0.118508 -2.96973e-08 -0.912871
+-0.397584 0.0927035 0.0597047 -0.910916
+-0.402795 0.0665018 0.119154 -0.905061
+-0.406282 0.0400153 0.178092 -0.89533
+-0.40803 0.0133575 0.236268 -0.881766
+0.40803 0.0133575 -0.119154 0.905061
+0.406282 0.0400153 -0.178092 0.89533
+0.402795 0.0665018 -0.236268 0.881766
+0.397584 0.0927035 -0.293433 0.864425
+0.390669 0.118508 -0.349341 0.843383
+0.382082 0.143805 -0.403752 0.818729
+0.371859 0.168487 -0.456435 0.790569
+0.360043 0.192447 -0.507164 0.759024
+0.346685 0.215583 -0.555721 0.724229
+0.331843 0.237796 -0.601898 0.686333
+0.31558 0.25899 -0.645497 0.645497
+0.297966 0.279075 -0.686333 0.601898
+0.279075 0.297966 -0.724229 0.555721
+0.25899 0.31558 -0.759024 0.507164
+0.237796 0.331843 -0.790569 0.456435
+0.215583 0.346685 -0.818729 0.403752
+0.192447 0.360043 -0.843383 0.349341
+0.168487 0.371859 -0.864425 0.293433
+0.143805 0.382082 -0.881766 0.236268
+0.118508 0.390669 -0.89533 0.178092
+0.0927035 0.397584 -0.905061 0.119154
+0.0665017 0.402795 -0.910916 0.0597045
+0.0400153 0.406282 -0.912871 -1.12225e-07
+0.0133575 0.40803 -0.910916 -0.0597047
+-0.0133575 0.40803 -0.905061 -0.119154
+-0.0400154 0.406282 -0.89533 -0.178092
+-0.0665018 0.402795 -0.881766 -0.236268
+-0.0927035 0.397584 -0.864425 -0.293433
+-0.118508 0.390669 -0.843383 -0.349341
+-0.143806 0.382082 -0.818729 -0.403753
+-0.168487 0.371859 -0.790569 -0.456436
+-0.192447 0.360043 -0.759024 -0.507164
+-0.215583 0.346685 -0.724229 -0.555721
+-0.237796 0.331843 -0.686333 -0.601898
+-0.25899 0.31558 -0.645497 -0.645497
+-0.279075 0.297966 -0.601898 -0.686333
+-0.297966 0.279075 -0.555721 -0.724229
+-0.31558 0.25899 -0.507164 -0.759024
+-0.331843 0.237796 -0.456436 -0.790569
+-0.346685 0.215583 -0.403752 -0.818729
+-0.360043 0.192447 -0.349341 -0.843383
+-0.371859 0.168487 -0.293433 -0.864425
+-0.382082 0.143805 -0.236268 -0.881766
+-0.390669 0.118508 -0.178092 -0.89533
+-0.397584 0.0927035 -0.119153 -0.905061
+-0.402795 0.0665018 -0.0597047 -0.910916
+-0.406282 0.0400153 4.33047e-08 -0.912871
+-0.40803 0.0133575 0.0597045 -0.910916
+0.456191 0.0149342 -0.202043 0.866513
+0.454238 0.0447385 -0.258283 0.851444
+0.450339 0.0743513 -0.313417 0.832728
+0.444512 0.103646 -0.367209 0.810447
+0.436782 0.132496 -0.419428 0.784695
+0.427181 0.160779 -0.469852 0.755583
+0.415751 0.188374 -0.518263 0.723236
+0.40254 0.215162 -0.564456 0.687791
+0.387606 0.241029 -0.608231 0.649401
+0.371012 0.265863 -0.649401 0.608231
+0.352829 0.28956 -0.687791 0.564456
+0.333136 0.312016 -0.723236 0.518263
+0.312016 0.333136 -0.755583 0.469852
+0.28956 0.352829 -0.784695 0.419428
+0.265863 0.371012 -0.810447 0.367209
+0.241029 0.387606 -0.832728 0.313417
+0.215162 0.40254 -0.851444 0.258283
+0.188374 0.415751 -0.866513 0.202043
+0.160779 0.427181 -0.877872 0.144937
+0.132496 0.436782 -0.885472 0.0872114
+0.103646 0.444512 -0.88928 0.029112
+0.0743512 0.450339 -0.88928 -0.0291121
+0.0447384 0.454238 -0.885472 -0.0872115
+0.0149341 0.456191 -0.877872 -0.144937
+-0.0149342 0.456191 -0.866513 -0.202043
+-0.0447385 0.454238 -0.851444 -0.258283
+-0.0743513 0.450339 -0.832728 -0.313417
+-0.103646 0.444512 -0.810447 -0.367209
+-0.132496 0.436781 -0.784695 -0.419428
+-0.160779 0.427181 -0.755583 -0.469852
+-0.188374 0.415751 -0.723236 -0.518263
+-0.215162 0.40254 -0.687791 -0.564455
+-0.241029 0.387606 -0.649401 -0.608231
+-0.265864 0.371012 -0.608231 -0.649401
+-0.28956 0.352829 -0.564456 -0.687791
+-0.312016 0.333136 -0.518263 -0.723236
+-0.333136 0.312016 -0.469852 -0.755583
+-0.352829 0.28956 -0.419428 -0.784695
+-0.371012 0.265864 -0.367209 -0.810447
+-0.387606 0.241029 -0.313417 -0.832728
+-0.40254 0.215162 -0.258283 -0.851444
+-0.415751 0.188374 -0.202043 -0.866513
+-0.427181 0.160779 -0.144937 -0.877872
+-0.436782 0.132496 -0.0872114 -0.885472
+-0.444512 0.103646 -0.029112 -0.88928
+-0.450339 0.0743513 0.029112 -0.88928
+-0.454238 0.0447385 0.0872114 -0.885472
+-0.456191 0.0149342 0.144937 -0.877872
+0.456191 0.0149342 -0.367209 0.810447
+0.454238 0.0447385 -0.419428 0.784695
+0.450339 0.0743513 -0.469852 0.755583
+0.444512 0.103646 -0.518263 0.723236
+0.436782 0.132496 -0.564456 0.687791
+0.427181 0.160779 -0.608231 0.649401
+0.415751 0.188374 -0.649401 0.608231
+0.40254 0.215162 -0.687791 0.564456
+0.387606 0.241029 -0.723236 0.518263
+0.371012 0.265863 -0.755583 0.469852
+0.352829 0.28956 -0.784695 0.419428
+0.333136 0.312016 -0.810447 0.367209
+0.312016 0.333136 -0.832728 0.313417
+0.28956 0.352829 -0.851444 0.258283
+0.265863 0.371012 -0.866513 0.202043
+0.241029 0.387606 -0.877872 0.144937
+0.215162 0.40254 -0.885472 0.0872113
+0.188374 0.415751 -0.88928 0.029112
+0.160779 0.427181 -0.88928 -0.0291121
+0.132496 0.436782 -0.885472 -0.0872114
+0.103646 0.444512 -0.877872 -0.144937
+0.0743512 0.450339 -0.866513 -0.202043
+0.0447384 0.454238 -0.851444 -0.258283
+0.0149341 0.456191 -0.832728 -0.313417
+-0.0149342 0.456191 -0.810447 -0.367209
+-0.0447385 0.454238 -0.784695 -0.419428
+-0.0743513 0.450339 -0.755583 -0.469852
+-0.103646 0.444512 -0.723236 -0.518263
+-0.132496 0.436781 -0.687791 -0.564456
+-0.160779 0.427181 -0.649401 -0.608231
+-0.188374 0.415751 -0.608231 -0.649401
+-0.215162 0.40254 -0.564456 -0.687791
+-0.241029 0.387606 -0.518263 -0.723236
+-0.265864 0.371012 -0.469852 -0.755583
+-0.28956 0.352829 -0.419428 -0.784695
+-0.312016 0.333136 -0.367209 -0.810447
+-0.333136 0.312016 -0.313417 -0.832728
+-0.352829 0.28956 -0.258283 -0.851444
+-0.371012 0.265864 -0.202043 -0.866513
+-0.387606 0.241029 -0.144937 -0.877872
+-0.40254 0.215162 -0.0872115 -0.885472
+-0.415751 0.188374 -0.029112 -0.88928
+-0.427181 0.160779 0.0291121 -0.88928
+-0.436782 0.132496 0.0872114 -0.885472
+-0.444512 0.103646 0.144937 -0.877872
+-0.450339 0.0743513 0.202043 -0.866513
+-0.454238 0.0447385 0.258283 -0.851444
+-0.456191 0.0149342 0.313417 -0.832728
+0.499732 0.0163595 -0.433013 0.75
+0.497592 0.0490086 -0.481138 0.720074
+0.493322 0.0814477 -0.527203 0.687064
+0.486938 0.113538 -0.57101 0.651112
+0.47847 0.145142 -0.612372 0.612372
+0.467953 0.176125 -0.651112 0.57101
+0.455432 0.206354 -0.687064 0.527203
+0.440961 0.235698 -0.720074 0.481138
+0.424601 0.264034 -0.75 0.433013
+0.406423 0.291239 -0.776715 0.383033
+0.386505 0.317197 -0.800103 0.331414
+0.364932 0.341796 -0.820066 0.278375
+0.341796 0.364932 -0.836516 0.224144
+0.317197 0.386505 -0.849385 0.168953
+0.291239 0.406423 -0.858616 0.113039
+0.264034 0.424601 -0.864171 0.0566407
+0.235698 0.440961 -0.866025 -7.11922e-08
+0.206353 0.455432 -0.864171 -0.0566408
+0.176125 0.467953 -0.858616 -0.113039
+0.145142 0.47847 -0.849385 -0.168953
+0.113538 0.486938 -0.836516 -0.224144
+0.0814477 0.493322 -0.820066 -0.278375
+0.0490085 0.497592 -0.800103 -0.331414
+0.0163595 0.499732 -0.776715 -0.383033
+-0.0163596 0.499732 -0.75 -0.433013
+-0.0490086 0.497592 -0.720074 -0.481138
+-0.0814478 0.493322 -0.687064 -0.527203
+-0.113538 0.486938 -0.651112 -0.57101
+-0.145142 0.47847 -0.612372 -0.612373
+-0.176125 0.467953 -0.57101 -0.651112
+-0.206354 0.455432 -0.527203 -0.687064
+-0.235698 0.440961 -0.481138 -0.720074
+-0.264034 0.424601 -0.433013 -0.75
+-0.291239 0.406423 -0.383033 -0.776715
+-0.317197 0.386505 -0.331414 -0.800103
+-0.341796 0.364932 -0.278375 -0.820066
+-0.364932 0.341796 -0.224144 -0.836516
+-0.386505 0.317197 -0.168953 -0.849385
+-0.406423 0.291239 -0.113039 -0.858616
+-0.424601 0.264034 -0.0566408 -0.864171
+-0.440961 0.235698 -9.74292e-08 -0.866025
+-0.455432 0.206354 0.0566408 -0.864171
+-0.467953 0.176125 0.113039 -0.858616
+-0.47847 0.145142 0.168953 -0.849385
+-0.486938 0.113538 0.224144 -0.836516
+-0.493322 0.0814478 0.278375 -0.820066
+-0.497592 0.0490085 0.331414 -0.800103
+-0.499732 0.0163596 0.383033 -0.776715
+0.499732 0.0163595 -0.278375 0.820066
+0.497592 0.0490086 -0.331414 0.800103
+0.493322 0.0814477 -0.383033 0.776715
+0.486938 0.113538 -0.433013 0.75
+0.47847 0.145142 -0.481138 0.720074
+0.467953 0.176125 -0.527203 0.687064
+0.455432 0.206354 -0.57101 0.651112
+0.440961 0.235698 -0.612372 0.612372
+0.424601 0.264034 -0.651112 0.57101
+0.406423 0.291239 -0.687064 0.527203
+0.386505 0.317197 -0.720074 0.481138
+0.364932 0.341796 -0.75 0.433013
+0.341796 0.364932 -0.776715 0.383033
+0.317197 0.386505 -0.800103 0.331414
+0.291239 0.406423 -0.820066 0.278375
+0.264034 0.424601 -0.836516 0.224144
+0.235698 0.440961 -0.849385 0.168953
+0.206353 0.455432 -0.858616 0.113039
+0.176125 0.467953 -0.864171 0.0566407
+0.145142 0.47847 -0.866025 -3.72097e-08
+0.113538 0.486938 -0.864171 -0.0566408
+0.0814477 0.493322 -0.858616 -0.113039
+0.0490085 0.497592 -0.849385 -0.168953
+0.0163595 0.499732 -0.836516 -0.224144
+-0.0163596 0.499732 -0.820066 -0.278375
+-0.0490086 0.497592 -0.800103 -0.331414
+-0.0814478 0.493322 -0.776715 -0.383033
+-0.113538 0.486938 -0.75 -0.433013
+-0.145142 0.47847 -0.720074 -0.481138
+-0.176125 0.467953 -0.687064 -0.527203
+-0.206354 0.455432 -0.651112 -0.57101
+-0.235698 0.440961 -0.612373 -0.612372
+-0.264034 0.424601 -0.57101 -0.651112
+-0.291239 0.406423 -0.527203 -0.687064
+-0.317197 0.386505 -0.481138 -0.720074
+-0.341796 0.364932 -0.433013 -0.75
+-0.364932 0.341796 -0.383033 -0.776715
+-0.386505 0.317197 -0.331414 -0.800103
+-0.406423 0.291239 -0.278375 -0.820066
+-0.424601 0.264034 -0.224144 -0.836516
+-0.440961 0.235698 -0.168953 -0.849385
+-0.455432 0.206354 -0.113039 -0.858616
+-0.467953 0.176125 -0.0566407 -0.864171
+-0.47847 0.145142 -2.81734e-08 -0.866025
+-0.486938 0.113538 0.0566408 -0.864171
+-0.493322 0.0814478 0.113039 -0.858616
+-0.497592 0.0490085 0.168953 -0.849385
+-0.499732 0.0163596 0.224144 -0.836516
+0.539773 0.0176703 -0.347345 0.766606
+0.537461 0.0529353 -0.396739 0.742247
+0.532848 0.0879736 -0.444435 0.71471
+0.525954 0.122635 -0.490228 0.684112
+0.516807 0.156772 -0.533922 0.650585
+0.505447 0.190237 -0.575329 0.614272
+0.491923 0.222887 -0.614272 0.575329
+0.476292 0.254583 -0.650585 0.533921
+0.458622 0.285189 -0.684112 0.490228
+0.438987 0.314574 -0.71471 0.444435
+0.417473 0.342612 -0.742247 0.396739
+0.394172 0.369182 -0.766606 0.347345
+0.369182 0.394172 -0.787682 0.296463
+0.342612 0.417473 -0.805385 0.244311
+0.314574 0.438987 -0.81964 0.191113
+0.285189 0.458622 -0.830384 0.137097
+0.254583 0.476292 -0.837573 0.0824936
+0.222887 0.491923 -0.841175 0.0275372
+0.190237 0.505447 -0.841175 -0.0275373
+0.156772 0.516807 -0.837573 -0.0824938
+0.122635 0.525954 -0.830384 -0.137097
+0.0879735 0.532848 -0.81964 -0.191113
+0.0529352 0.537461 -0.805385 -0.244311
+0.0176703 0.539773 -0.787682 -0.296463
+-0.0176704 0.539773 -0.766606 -0.347345
+-0.0529354 0.537461 -0.742247 -0.39674
+-0.0879736 0.532848 -0.71471 -0.444435
+-0.122635 0.525954 -0.684112 -0.490228
+-0.156772 0.516807 -0.650585 -0.533922
+-0.190237 0.505447 -0.614272 -0.575329
+-0.222887 0.491923 -0.575329 -0.614272
+-0.254583 0.476292 -0.533922 -0.650585
+-0.285189 0.458622 -0.490228 -0.684112
+-0.314574 0.438987 -0.444435 -0.71471
+-0.342612 0.417473 -0.396739 -0.742247
+-0.369182 0.394172 -0.347345 -0.766606
+-0.394172 0.369182 -0.296463 -0.787682
+-0.417473 0.342612 -0.244311 -0.805385
+-0.438987 0.314574 -0.191113 -0.81964
+-0.458622 0.285189 -0.137097 -0.830384
+-0.476292 0.254583 -0.0824938 -0.837573
+-0.491923 0.222887 -0.0275372 -0.841175
+-0.505447 0.190237 0.0275373 -0.841175
+-0.516807 0.156772 0.0824937 -0.837573
+-0.525954 0.122635 0.137097 -0.830384
+-0.532848 0.0879736 0.191113 -0.81964
+-0.537461 0.0529353 0.244311 -0.805385
+-0.539773 0.0176704 0.296463 -0.787682
+0.456191 0.0149342 -0.649401 0.608231
+0.454238 0.0447385 -0.687791 0.564456
+0.450339 0.0743513 -0.723236 0.518263
+0.444512 0.103646 -0.755583 0.469852
+0.436782 0.132496 -0.784695 0.419428
+0.427181 0.160779 -0.810447 0.367209
+0.415751 0.188374 -0.832728 0.313417
+0.40254 0.215162 -0.851444 0.258283
+0.387606 0.241029 -0.866513 0.202043
+0.371012 0.265863 -0.877872 0.144937
+0.352829 0.28956 -0.885472 0.0872114
+0.333136 0.312016 -0.88928 0.029112
+0.312016 0.333136 -0.88928 -0.029112
+0.28956 0.352829 -0.885472 -0.0872114
+0.265863 0.371012 -0.877872 -0.144937
+0.241029 0.387606 -0.866513 -0.202043
+0.215162 0.40254 -0.851444 -0.258283
+0.188374 0.415751 -0.832728 -0.313417
+0.160779 0.427181 -0.810447 -0.367209
+0.132496 0.436782 -0.784695 -0.419428
+0.103646 0.444512 -0.755583 -0.469852
+0.0743512 0.450339 -0.723236 -0.518263
+0.0447384 0.454238 -0.687791 -0.564456
+0.0149341 0.456191 -0.649401 -0.608231
+-0.0149342 0.456191 -0.608231 -0.649401
+-0.0447385 0.454238 -0.564456 -0.687791
+-0.0743513 0.450339 -0.518263 -0.723236
+-0.103646 0.444512 -0.469852 -0.755583
+-0.132496 0.436781 -0.419428 -0.784695
+-0.160779 0.427181 -0.367209 -0.810447
+-0.188374 0.415751 -0.313417 -0.832728
+-0.215162 0.40254 -0.258283 -0.851444
+-0.241029 0.387606 -0.202043 -0.866513
+-0.265864 0.371012 -0.144937 -0.877872
+-0.28956 0.352829 -0.0872114 -0.885472
+-0.312016 0.333136 -0.029112 -0.88928
+-0.333136 0.312016 0.029112 -0.88928
+-0.352829 0.28956 0.0872114 -0.885472
+-0.371012 0.265864 0.144937 -0.877872
+-0.387606 0.241029 0.202043 -0.866513
+-0.40254 0.215162 0.258283 -0.851444
+-0.415751 0.188374 0.313417 -0.832728
+-0.427181 0.160779 0.367209 -0.810447
+-0.436782 0.132496 0.419428 -0.784695
+-0.444512 0.103646 0.469852 -0.755583
+-0.450339 0.0743513 0.518263 -0.723236
+-0.454238 0.0447385 0.564456 -0.687791
+-0.456191 0.0149342 0.608231 -0.649401
+0.499732 0.0163595 -0.687064 0.527203
+0.497592 0.0490086 -0.720074 0.481138
+0.493322 0.0814477 -0.75 0.433013
+0.486938 0.113538 -0.776715 0.383033
+0.47847 0.145142 -0.800103 0.331414
+0.467953 0.176125 -0.820066 0.278375
+0.455432 0.206354 -0.836516 0.224144
+0.440961 0.235698 -0.849385 0.168953
+0.424601 0.264034 -0.858616 0.113039
+0.406423 0.291239 -0.864171 0.0566408
+0.386505 0.317197 -0.866025 1.57003e-08
+0.364932 0.341796 -0.864171 -0.0566408
+0.341796 0.364932 -0.858616 -0.113039
+0.317197 0.386505 -0.849385 -0.168953
+0.291239 0.406423 -0.836516 -0.224144
+0.264034 0.424601 -0.820066 -0.278375
+0.235698 0.440961 -0.800103 -0.331414
+0.206353 0.455432 -0.776715 -0.383033
+0.176125 0.467953 -0.75 -0.433013
+0.145142 0.47847 -0.720074 -0.481138
+0.113538 0.486938 -0.687064 -0.527203
+0.0814477 0.493322 -0.651112 -0.57101
+0.0490085 0.497592 -0.612372 -0.612373
+0.0163595 0.499732 -0.57101 -0.651112
+-0.0163596 0.499732 -0.527203 -0.687064
+-0.0490086 0.497592 -0.481138 -0.720074
+-0.0814478 0.493322 -0.433013 -0.75
+-0.113538 0.486938 -0.383033 -0.776715
+-0.145142 0.47847 -0.331413 -0.800103
+-0.176125 0.467953 -0.278375 -0.820066
+-0.206354 0.455432 -0.224144 -0.836516
+-0.235698 0.440961 -0.168953 -0.849385
+-0.264034 0.424601 -0.113039 -0.858616
+-0.291239 0.406423 -0.0566407 -0.864171
+-0.317197 0.386505 -2.94643e-08 -0.866025
+-0.341796 0.364932 0.0566408 -0.864171
+-0.364932 0.341796 0.113039 -0.858616
+-0.386505 0.317197 0.168953 -0.849385
+-0.406423 0.291239 0.224144 -0.836516
+-0.424601 0.264034 0.278375 -0.820066
+-0.440961 0.235698 0.331413 -0.800103
+-0.455432 0.206354 0.383033 -0.776715
+-0.467953 0.176125 0.433013 -0.75
+-0.47847 0.145142 0.481138 -0.720074
+-0.486938 0.113538 0.527203 -0.687064
+-0.493322 0.0814478 0.57101 -0.651112
+-0.497592 0.0490085 0.612372 -0.612372
+-0.499732 0.0163596 0.651112 -0.57101
+0.499732 0.0163595 -0.57101 0.651112
+0.497592 0.0490086 -0.612372 0.612372
+0.493322 0.0814477 -0.651112 0.57101
+0.486938 0.113538 -0.687064 0.527203
+0.47847 0.145142 -0.720074 0.481138
+0.467953 0.176125 -0.75 0.433013
+0.455432 0.206354 -0.776715 0.383033
+0.440961 0.235698 -0.800103 0.331414
+0.424601 0.264034 -0.820066 0.278375
+0.406423 0.291239 -0.836516 0.224144
+0.386505 0.317197 -0.849385 0.168953
+0.364932 0.341796 -0.858616 0.113039
+0.341796 0.364932 -0.864171 0.0566408
+0.317197 0.386505 -0.866025 -1.93636e-09
+0.291239 0.406423 -0.864171 -0.0566408
+0.264034 0.424601 -0.858616 -0.113039
+0.235698 0.440961 -0.849385 -0.168953
+0.206353 0.455432 -0.836516 -0.224144
+0.176125 0.467953 -0.820066 -0.278375
+0.145142 0.47847 -0.800103 -0.331414
+0.113538 0.486938 -0.776715 -0.383033
+0.0814477 0.493322 -0.75 -0.433013
+0.0490085 0.497592 -0.720074 -0.481138
+0.0163595 0.499732 -0.687064 -0.527203
+-0.0163596 0.499732 -0.651112 -0.57101
+-0.0490086 0.497592 -0.612372 -0.612372
+-0.0814478 0.493322 -0.57101 -0.651112
+-0.113538 0.486938 -0.527203 -0.687064
+-0.145142 0.47847 -0.481138 -0.720074
+-0.176125 0.467953 -0.433013 -0.75
+-0.206354 0.455432 -0.383033 -0.776715
+-0.235698 0.440961 -0.331414 -0.800103
+-0.264034 0.424601 -0.278375 -0.820066
+-0.291239 0.406423 -0.224144 -0.836516
+-0.317197 0.386505 -0.168953 -0.849385
+-0.341796 0.364932 -0.113039 -0.858616
+-0.364932 0.341796 -0.0566408 -0.864171
+-0.386505 0.317197 3.97915e-08 -0.866025
+-0.406423 0.291239 0.0566407 -0.864171
+-0.424601 0.264034 0.113039 -0.858616
+-0.440961 0.235698 0.168953 -0.849385
+-0.455432 0.206354 0.224144 -0.836516
+-0.467953 0.176125 0.278375 -0.820066
+-0.47847 0.145142 0.331414 -0.800103
+-0.486938 0.113538 0.383033 -0.776715
+-0.493322 0.0814478 0.433013 -0.75
+-0.497592 0.0490085 0.481138 -0.720074
+-0.499732 0.0163596 0.527203 -0.687064
+0.539773 0.0176703 -0.614272 0.575329
+0.537461 0.0529353 -0.650585 0.533922
+0.532848 0.0879736 -0.684112 0.490228
+0.525954 0.122635 -0.71471 0.444435
+0.516807 0.156772 -0.742247 0.396739
+0.505447 0.190237 -0.766606 0.347345
+0.491923 0.222887 -0.787682 0.296463
+0.476292 0.254583 -0.805385 0.244311
+0.458622 0.285189 -0.81964 0.191113
+0.438987 0.314574 -0.830384 0.137097
+0.417473 0.342612 -0.837573 0.0824937
+0.394172 0.369182 -0.841175 0.0275372
+0.369182 0.394172 -0.841175 -0.0275372
+0.342612 0.417473 -0.837573 -0.0824937
+0.314574 0.438987 -0.830384 -0.137097
+0.285189 0.458622 -0.81964 -0.191113
+0.254583 0.476292 -0.805385 -0.244311
+0.222887 0.491923 -0.787682 -0.296463
+0.190237 0.505447 -0.766606 -0.347345
+0.156772 0.516807 -0.742247 -0.39674
+0.122635 0.525954 -0.71471 -0.444435
+0.0879735 0.532848 -0.684112 -0.490228
+0.0529352 0.537461 -0.650585 -0.533922
+0.0176703 0.539773 -0.614272 -0.575329
+-0.0176704 0.539773 -0.575329 -0.614272
+-0.0529354 0.537461 -0.533921 -0.650585
+-0.0879736 0.532848 -0.490228 -0.684112
+-0.122635 0.525954 -0.444435 -0.71471
+-0.156772 0.516807 -0.396739 -0.742247
+-0.190237 0.505447 -0.347345 -0.766606
+-0.222887 0.491923 -0.296462 -0.787682
+-0.254583 0.476292 -0.244311 -0.805385
+-0.285189 0.458622 -0.191113 -0.81964
+-0.314574 0.438987 -0.137097 -0.830384
+-0.342612 0.417473 -0.0824937 -0.837573
+-0.369182 0.394172 -0.0275372 -0.841175
+-0.394172 0.369182 0.0275372 -0.841175
+-0.417473 0.342612 0.0824938 -0.837573
+-0.438987 0.314574 0.137097 -0.830384
+-0.458622 0.285189 0.191113 -0.81964
+-0.476292 0.254583 0.244311 -0.805385
+-0.491923 0.222887 0.296463 -0.787682
+-0.505447 0.190237 0.347345 -0.766606
+-0.516807 0.156772 0.396739 -0.742247
+-0.525954 0.122635 0.444435 -0.71471
+-0.532848 0.0879736 0.490228 -0.684112
+-0.537461 0.0529353 0.533922 -0.650585
+-0.539773 0.0176704 0.575329 -0.614272
+0.539773 0.0176703 -0.71471 0.444435
+0.537461 0.0529353 -0.742247 0.396739
+0.532848 0.0879736 -0.766606 0.347345
+0.525954 0.122635 -0.787682 0.296463
+0.516807 0.156772 -0.805385 0.244311
+0.505447 0.190237 -0.81964 0.191113
+0.491923 0.222887 -0.830384 0.137097
+0.476292 0.254583 -0.837573 0.0824937
+0.458622 0.285189 -0.841175 0.0275372
+0.438987 0.314574 -0.841175 -0.0275372
+0.417473 0.342612 -0.837573 -0.0824937
+0.394172 0.369182 -0.830384 -0.137097
+0.369182 0.394172 -0.81964 -0.191113
+0.342612 0.417473 -0.805385 -0.244311
+0.314574 0.438987 -0.787682 -0.296463
+0.285189 0.458622 -0.766606 -0.347345
+0.254583 0.476292 -0.742247 -0.39674
+0.222887 0.491923 -0.71471 -0.444435
+0.190237 0.505447 -0.684112 -0.490228
+0.156772 0.516807 -0.650585 -0.533922
+0.122635 0.525954 -0.614272 -0.575329
+0.0879735 0.532848 -0.575329 -0.614272
+0.0529352 0.537461 -0.533921 -0.650585
+0.0176703 0.539773 -0.490228 -0.684112
+-0.0176704 0.539773 -0.444435 -0.71471
+-0.0529354 0.537461 -0.396739 -0.742247
+-0.0879736 0.532848 -0.347345 -0.766606
+-0.122635 0.525954 -0.296463 -0.787682
+-0.156772 0.516807 -0.244311 -0.805385
+-0.190237 0.505447 -0.191113 -0.81964
+-0.222887 0.491923 -0.137097 -0.830384
+-0.254583 0.476292 -0.0824938 -0.837573
+-0.285189 0.458622 -0.0275372 -0.841175
+-0.314574 0.438987 0.0275373 -0.841175
+-0.342612 0.417473 0.0824937 -0.837573
+-0.369182 0.394172 0.137097 -0.830384
+-0.394172 0.369182 0.191113 -0.81964
+-0.417473 0.342612 0.244311 -0.805385
+-0.438987 0.314574 0.296463 -0.787682
+-0.458622 0.285189 0.347345 -0.766606
+-0.476292 0.254583 0.396739 -0.742247
+-0.491923 0.222887 0.444435 -0.71471
+-0.505447 0.190237 0.490228 -0.684112
+-0.516807 0.156772 0.533921 -0.650585
+-0.525954 0.122635 0.575329 -0.614272
+-0.532848 0.0879736 0.614272 -0.575329
+-0.537461 0.0529353 0.650585 -0.533921
+-0.539773 0.0176704 0.684112 -0.490228
+0.577041 0.0188904 -0.732294 0.361127
+0.57457 0.0565902 -0.754344 0.31246
+0.569639 0.0940477 -0.773165 0.262454
+0.562268 0.131103 -0.788675 0.211325
+0.55249 0.167596 -0.800808 0.159291
+0.540346 0.203372 -0.809511 0.106574
+0.525887 0.238277 -0.814748 0.0534014
+0.509177 0.272161 -0.816497 -1.72366e-08
+0.490287 0.30488 -0.814748 -0.0534014
+0.469297 0.336294 -0.809511 -0.106574
+0.446298 0.366267 -0.800808 -0.159291
+0.421387 0.394672 -0.788675 -0.211325
+0.394672 0.421387 -0.773165 -0.262454
+0.366267 0.446298 -0.754344 -0.31246
+0.336294 0.469297 -0.732294 -0.361127
+0.30488 0.490287 -0.707107 -0.408248
+0.272161 0.509178 -0.678892 -0.453621
+0.238276 0.525887 -0.64777 -0.497052
+0.203372 0.540346 -0.613875 -0.538354
+0.167596 0.55249 -0.57735 -0.57735
+0.131103 0.562268 -0.538354 -0.613875
+0.0940477 0.569639 -0.497052 -0.64777
+0.0565902 0.57457 -0.453621 -0.678892
+0.0188903 0.577041 -0.408248 -0.707107
+-0.0188904 0.577041 -0.361127 -0.732294
+-0.0565903 0.57457 -0.31246 -0.754345
+-0.0940478 0.569639 -0.262454 -0.773165
+-0.131103 0.562268 -0.211325 -0.788675
+-0.167596 0.55249 -0.15929 -0.800808
+-0.203372 0.540346 -0.106574 -0.809511
+-0.238277 0.525887 -0.0534013 -0.814748
+-0.272161 0.509178 -9.30742e-08 -0.816497
+-0.30488 0.490287 0.0534014 -0.814748
+-0.336294 0.469297 0.106574 -0.809511
+-0.366267 0.446298 0.159291 -0.800808
+-0.394672 0.421387 0.211325 -0.788675
+-0.421387 0.394672 0.262454 -0.773165
+-0.446298 0.366267 0.31246 -0.754344
+-0.469297 0.336294 0.361127 -0.732294
+-0.490287 0.30488 0.408248 -0.707107
+-0.509177 0.272161 0.453621 -0.678892
+-0.525887 0.238277 0.497052 -0.64777
+-0.540346 0.203372 0.538354 -0.613875
+-0.55249 0.167596 0.57735 -0.57735
+-0.562268 0.131103 0.613875 -0.538354
+-0.569639 0.0940478 0.64777 -0.497052
+-0.57457 0.0565902 0.678892 -0.453621
+-0.577041 0.0188904 0.707107 -0.408248
+0.577041 0.0188904 -0.64777 0.497052
+0.57457 0.0565902 -0.678892 0.453621
+0.569639 0.0940477 -0.707107 0.408248
+0.562268 0.131103 -0.732294 0.361127
+0.55249 0.167596 -0.754344 0.31246
+0.540346 0.203372 -0.773165 0.262454
+0.525887 0.238277 -0.788675 0.211325
+0.509177 0.272161 -0.800808 0.159291
+0.490287 0.30488 -0.809511 0.106574
+0.469297 0.336294 -0.814748 0.0534014
+0.446298 0.366267 -0.816497 1.48024e-08
+0.421387 0.394672 -0.814748 -0.0534015
+0.394672 0.421387 -0.809511 -0.106574
+0.366267 0.446298 -0.800808 -0.159291
+0.336294 0.469297 -0.788675 -0.211325
+0.30488 0.490287 -0.773165 -0.262454
+0.272161 0.509178 -0.754344 -0.31246
+0.238276 0.525887 -0.732294 -0.361127
+0.203372 0.540346 -0.707107 -0.408248
+0.167596 0.55249 -0.678892 -0.453621
+0.131103 0.562268 -0.64777 -0.497052
+0.0940477 0.569639 -0.613875 -0.538354
+0.0565902 0.57457 -0.57735 -0.57735
+0.0188903 0.577041 -0.538354 -0.613875
+-0.0188904 0.577041 -0.497052 -0.64777
+-0.0565903 0.57457 -0.453621 -0.678892
+-0.0940478 0.569639 -0.408248 -0.707107
+-0.131103 0.562268 -0.361127 -0.732294
+-0.167596 0.55249 -0.31246 -0.754345
+-0.203372 0.540346 -0.262454 -0.773165
+-0.238277 0.525887 -0.211325 -0.788675
+-0.272161 0.509178 -0.159291 -0.800808
+-0.30488 0.490287 -0.106574 -0.809511
+-0.336294 0.469297 -0.0534014 -0.814748
+-0.366267 0.446298 -2.77792e-08 -0.816497
+-0.394672 0.421387 0.0534015 -0.814748
+-0.421387 0.394672 0.106574 -0.809511
+-0.446298 0.366267 0.159291 -0.800808
+-0.469297 0.336294 0.211325 -0.788675
+-0.490287 0.30488 0.262454 -0.773165
+-0.509177 0.272161 0.31246 -0.754345
+-0.525887 0.238277 0.361127 -0.732294
+-0.540346 0.203372 0.408248 -0.707107
+-0.55249 0.167596 0.453621 -0.678892
+-0.562268 0.131103 0.497052 -0.64777
+-0.569639 0.0940478 0.538354 -0.613875
+-0.57457 0.0565902 0.57735 -0.57735
+-0.577041 0.0188904 0.613875 -0.538354
+0.612045 0.0200363 -0.671353 0.417474
+0.609424 0.060023 -0.69722 0.372672
+0.604193 0.0997527 -0.720101 0.326274
+0.596375 0.139055 -0.739899 0.278478
+0.586004 0.177762 -0.756528 0.22949
+0.573123 0.215708 -0.769917 0.17952
+0.557788 0.25273 -0.78001 0.12878
+0.540064 0.28867 -0.786763 0.0774893
+0.520028 0.323374 -0.790146 0.0258667
+0.497765 0.356693 -0.790146 -0.0258667
+0.47337 0.388485 -0.786763 -0.0774893
+0.446949 0.418613 -0.78001 -0.12878
+0.418613 0.446949 -0.769917 -0.17952
+0.388485 0.47337 -0.756528 -0.22949
+0.356693 0.497765 -0.739899 -0.278478
+0.323374 0.520028 -0.720101 -0.326274
+0.28867 0.540064 -0.69722 -0.372672
+0.25273 0.557788 -0.671353 -0.417474
+0.215708 0.573123 -0.642612 -0.460489
+0.177762 0.586004 -0.611118 -0.501532
+0.139055 0.596375 -0.577008 -0.540427
+0.0997526 0.604193 -0.540427 -0.577008
+0.0600229 0.609424 -0.501532 -0.611118
+0.0200362 0.612045 -0.460489 -0.642612
+-0.0200363 0.612045 -0.417474 -0.671353
+-0.060023 0.609424 -0.372672 -0.69722
+-0.0997527 0.604193 -0.326274 -0.720101
+-0.139055 0.596375 -0.278478 -0.739899
+-0.177762 0.586004 -0.22949 -0.756528
+-0.215708 0.573123 -0.179519 -0.769917
+-0.25273 0.557788 -0.12878 -0.78001
+-0.28867 0.540064 -0.0774894 -0.786763
+-0.323374 0.520028 -0.0258667 -0.790146
+-0.356693 0.497765 0.0258668 -0.790146
+-0.388485 0.47337 0.0774893 -0.786763
+-0.418613 0.446949 0.12878 -0.78001
+-0.446949 0.418613 0.17952 -0.769917
+-0.47337 0.388485 0.22949 -0.756528
+-0.497765 0.356693 0.278478 -0.739899
+-0.520028 0.323374 0.326274 -0.720101
+-0.540064 0.28867 0.372672 -0.69722
+-0.557788 0.25273 0.417474 -0.671353
+-0.573123 0.215708 0.460489 -0.642612
+-0.586004 0.177762 0.501532 -0.611118
+-0.596375 0.139055 0.540427 -0.577008
+-0.604193 0.0997527 0.577008 -0.540427
+-0.609424 0.060023 0.611118 -0.501532
+-0.612045 0.0200363 0.642612 -0.460489
+0.539773 0.0176703 -0.490228 0.684112
+0.537461 0.0529353 -0.533922 0.650585
+0.532848 0.0879736 -0.575329 0.614272
+0.525954 0.122635 -0.614272 0.575329
+0.516807 0.156772 -0.650585 0.533922
+0.505447 0.190237 -0.684112 0.490228
+0.491923 0.222887 -0.71471 0.444435
+0.476292 0.254583 -0.742247 0.396739
+0.458622 0.285189 -0.766606 0.347345
+0.438987 0.314574 -0.787682 0.296463
+0.417473 0.342612 -0.805385 0.244311
+0.394172 0.369182 -0.81964 0.191113
+0.369182 0.394172 -0.830384 0.137097
+0.342612 0.417473 -0.837573 0.0824937
+0.314574 0.438987 -0.841175 0.0275372
+0.285189 0.458622 -0.841175 -0.0275373
+0.254583 0.476292 -0.837573 -0.0824938
+0.222887 0.491923 -0.830384 -0.137097
+0.190237 0.505447 -0.81964 -0.191113
+0.156772 0.516807 -0.805385 -0.244311
+0.122635 0.525954 -0.787682 -0.296463
+0.0879735 0.532848 -0.766606 -0.347345
+0.0529352 0.537461 -0.742247 -0.39674
+0.0176703 0.539773 -0.71471 -0.444435
+-0.0176704 0.539773 -0.684112 -0.490228
+-0.0529354 0.537461 -0.650585 -0.533922
+-0.0879736 0.532848 -0.614272 -0.575329
+-0.122635 0.525954 -0.575329 -0.614272
+-0.156772 0.516807 -0.533921 -0.650585
+-0.190237 0.505447 -0.490228 -0.684112
+-0.222887 0.491923 -0.444435 -0.71471
+-0.254583 0.476292 -0.39674 -0.742247
+-0.285189 0.458622 -0.347345 -0.766606
+-0.314574 0.438987 -0.296463 -0.787682
+-0.342612 0.417473 -0.244311 -0.805385
+-0.369182 0.394172 -0.191113 -0.81964
+-0.394172 0.369182 -0.137097 -0.830384
+-0.417473 0.342612 -0.0824937 -0.837573
+-0.438987 0.314574 -0.0275373 -0.841175
+-0.458622 0.285189 0.0275372 -0.841175
+-0.476292 0.254583 0.0824936 -0.837573
+-0.491923 0.222887 0.137097 -0.830384
+-0.505447 0.190237 0.191113 -0.81964
+-0.516807 0.156772 0.244311 -0.805385
+-0.525954 0.122635 0.296463 -0.787682
+-0.532848 0.0879736 0.347345 -0.766606
+-0.537461 0.0529353 0.39674 -0.742247
+-0.539773 0.0176704 0.444435 -0.71471
+0.577041 0.0188904 -0.538354 0.613875
+0.57457 0.0565902 -0.57735 0.57735
+0.569639 0.0940477 -0.613875 0.538354
+0.562268 0.131103 -0.64777 0.497052
+0.55249 0.167596 -0.678892 0.453621
+0.540346 0.203372 -0.707107 0.408248
+0.525887 0.238277 -0.732294 0.361127
+0.509177 0.272161 -0.754344 0.31246
+0.490287 0.30488 -0.773165 0.262454
+0.469297 0.336294 -0.788675 0.211325
+0.446298 0.366267 -0.800808 0.159291
+0.421387 0.394672 -0.809511 0.106574
+0.394672 0.421387 -0.814748 0.0534014
+0.366267 0.446298 -0.816497 -1.82562e-09
+0.336294 0.469297 -0.814748 -0.0534015
+0.30488 0.490287 -0.809511 -0.106574
+0.272161 0.509178 -0.800808 -0.159291
+0.238276 0.525887 -0.788675 -0.211325
+0.203372 0.540346 -0.773165 -0.262454
+0.167596 0.55249 -0.754344 -0.31246
+0.131103 0.562268 -0.732294 -0.361127
+0.0940477 0.569639 -0.707107 -0.408248
+0.0565902 0.57457 -0.678892 -0.453621
+0.0188903 0.577041 -0.64777 -0.497052
+-0.0188904 0.577041 -0.613875 -0.538354
+-0.0565903 0.57457 -0.57735 -0.57735
+-0.0940478 0.569639 -0.538354 -0.613875
+-0.131103 0.562268 -0.497052 -0.64777
+-0.167596 0.55249 -0.453621 -0.678892
+-0.203372 0.540346 -0.408248 -0.707107
+-0.238277 0.525887 -0.361127 -0.732294
+-0.272161 0.509178 -0.31246 -0.754344
+-0.30488 0.490287 -0.262454 -0.773165
+-0.336294 0.469297 -0.211325 -0.788675
+-0.366267 0.446298 -0.159291 -0.800808
+-0.394672 0.421387 -0.106574 -0.809511
+-0.421387 0.394672 -0.0534015 -0.814748
+-0.446298 0.366267 3.75158e-08 -0.816497
+-0.469297 0.336294 0.0534014 -0.814748
+-0.490287 0.30488 0.106574 -0.809511
+-0.509177 0.272161 0.15929 -0.800808
+-0.525887 0.238277 0.211325 -0.788675
+-0.540346 0.203372 0.262454 -0.773165
+-0.55249 0.167596 0.31246 -0.754344
+-0.562268 0.131103 0.361127 -0.732294
+-0.569639 0.0940478 0.408248 -0.707107
+-0.57457 0.0565902 0.453621 -0.678892
+-0.577041 0.0188904 0.497052 -0.64777
+0.577041 0.0188904 -0.408248 0.707107
+0.57457 0.0565902 -0.453621 0.678892
+0.569639 0.0940477 -0.497052 0.64777
+0.562268 0.131103 -0.538354 0.613875
+0.55249 0.167596 -0.57735 0.57735
+0.540346 0.203372 -0.613875 0.538354
+0.525887 0.238277 -0.64777 0.497052
+0.509177 0.272161 -0.678892 0.453621
+0.490287 0.30488 -0.707107 0.408248
+0.469297 0.336294 -0.732294 0.361127
+0.446298 0.366267 -0.754344 0.31246
+0.421387 0.394672 -0.773165 0.262454
+0.394672 0.421387 -0.788675 0.211325
+0.366267 0.446298 -0.800808 0.159291
+0.336294 0.469297 -0.809511 0.106574
+0.30488 0.490287 -0.814748 0.0534014
+0.272161 0.509178 -0.816497 -6.71206e-08
+0.238276 0.525887 -0.814748 -0.0534015
+0.203372 0.540346 -0.809511 -0.106574
+0.167596 0.55249 -0.800808 -0.159291
+0.131103 0.562268 -0.788675 -0.211325
+0.0940477 0.569639 -0.773165 -0.262454
+0.0565902 0.57457 -0.754344 -0.31246
+0.0188903 0.577041 -0.732293 -0.361127
+-0.0188904 0.577041 -0.707107 -0.408248
+-0.0565903 0.57457 -0.678892 -0.453621
+-0.0940478 0.569639 -0.64777 -0.497052
+-0.131103 0.562268 -0.613875 -0.538354
+-0.167596 0.55249 -0.57735 -0.57735
+-0.203372 0.540346 -0.538354 -0.613875
+-0.238277 0.525887 -0.497052 -0.64777
+-0.272161 0.509178 -0.453621 -0.678892
+-0.30488 0.490287 -0.408248 -0.707107
+-0.336294 0.469297 -0.361127 -0.732294
+-0.366267 0.446298 -0.31246 -0.754344
+-0.394672 0.421387 -0.262454 -0.773165
+-0.421387 0.394672 -0.211325 -0.788675
+-0.446298 0.366267 -0.159291 -0.800808
+-0.469297 0.336294 -0.106574 -0.809511
+-0.490287 0.30488 -0.0534014 -0.814748
+-0.509177 0.272161 -9.18571e-08 -0.816497
+-0.525887 0.238277 0.0534014 -0.814748
+-0.540346 0.203372 0.106574 -0.809511
+-0.55249 0.167596 0.159291 -0.800808
+-0.562268 0.131103 0.211325 -0.788675
+-0.569639 0.0940478 0.262454 -0.773165
+-0.57457 0.0565902 0.31246 -0.754344
+-0.577041 0.0188904 0.361127 -0.732294
+0.612045 0.0200363 -0.460489 0.642612
+0.609424 0.060023 -0.501532 0.611118
+0.604193 0.0997527 -0.540427 0.577008
+0.596375 0.139055 -0.577008 0.540427
+0.586004 0.177762 -0.611118 0.501532
+0.573123 0.215708 -0.642612 0.460489
+0.557788 0.25273 -0.671353 0.417474
+0.540064 0.28867 -0.69722 0.372672
+0.520028 0.323374 -0.720101 0.326274
+0.497765 0.356693 -0.739899 0.278478
+0.47337 0.388485 -0.756528 0.22949
+0.446949 0.418613 -0.769917 0.17952
+0.418613 0.446949 -0.78001 0.12878
+0.388485 0.47337 -0.786763 0.0774894
+0.356693 0.497765 -0.790146 0.0258667
+0.323374 0.520028 -0.790146 -0.0258668
+0.28867 0.540064 -0.786763 -0.0774894
+0.25273 0.557788 -0.78001 -0.12878
+0.215708 0.573123 -0.769917 -0.17952
+0.177762 0.586004 -0.756528 -0.22949
+0.139055 0.596375 -0.739899 -0.278478
+0.0997526 0.604193 -0.720101 -0.326274
+0.0600229 0.609424 -0.69722 -0.372672
+0.0200362 0.612045 -0.671353 -0.417474
+-0.0200363 0.612045 -0.642612 -0.460489
+-0.060023 0.609424 -0.611118 -0.501532
+-0.0997527 0.604193 -0.577008 -0.540427
+-0.139055 0.596375 -0.540427 -0.577008
+-0.177762 0.586004 -0.501532 -0.611119
+-0.215708 0.573123 -0.460489 -0.642612
+-0.25273 0.557788 -0.417474 -0.671353
+-0.28867 0.540064 -0.372672 -0.69722
+-0.323374 0.520028 -0.326274 -0.720101
+-0.356693 0.497765 -0.278478 -0.739899
+-0.388485 0.47337 -0.22949 -0.756528
+-0.418613 0.446949 -0.179519 -0.769917
+-0.446949 0.418613 -0.12878 -0.78001
+-0.47337 0.388485 -0.0774893 -0.786763
+-0.497765 0.356693 -0.0258668 -0.790146
+-0.520028 0.323374 0.0258667 -0.790146
+-0.540064 0.28867 0.0774893 -0.786763
+-0.557788 0.25273 0.12878 -0.78001
+-0.573123 0.215708 0.17952 -0.769917
+-0.586004 0.177762 0.22949 -0.756528
+-0.596375 0.139055 0.278478 -0.739899
+-0.604193 0.0997527 0.326274 -0.720101
+-0.609424 0.060023 0.372672 -0.69722
+-0.612045 0.0200363 0.417474 -0.671353
+0.612045 0.0200363 -0.577008 0.540427
+0.609424 0.060023 -0.611118 0.501532
+0.604193 0.0997527 -0.642612 0.460489
+0.596375 0.139055 -0.671353 0.417474
+0.586004 0.177762 -0.69722 0.372672
+0.573123 0.215708 -0.720101 0.326274
+0.557788 0.25273 -0.739899 0.278478
+0.540064 0.28867 -0.756528 0.22949
+0.520028 0.323374 -0.769917 0.17952
+0.497765 0.356693 -0.78001 0.12878
+0.47337 0.388485 -0.786763 0.0774894
+0.446949 0.418613 -0.790146 0.0258667
+0.418613 0.446949 -0.790146 -0.0258667
+0.388485 0.47337 -0.786763 -0.0774894
+0.356693 0.497765 -0.78001 -0.12878
+0.323374 0.520028 -0.769917 -0.17952
+0.28867 0.540064 -0.756528 -0.22949
+0.25273 0.557788 -0.739899 -0.278478
+0.215708 0.573123 -0.720101 -0.326274
+0.177762 0.586004 -0.69722 -0.372672
+0.139055 0.596375 -0.671353 -0.417474
+0.0997526 0.604193 -0.642612 -0.460489
+0.0600229 0.609424 -0.611118 -0.501532
+0.0200362 0.612045 -0.577008 -0.540427
+-0.0200363 0.612045 -0.540427 -0.577008
+-0.060023 0.609424 -0.501532 -0.611118
+-0.0997527 0.604193 -0.460489 -0.642612
+-0.139055 0.596375 -0.417474 -0.671353
+-0.177762 0.586004 -0.372672 -0.69722
+-0.215708 0.573123 -0.326273 -0.720101
+-0.25273 0.557788 -0.278478 -0.739899
+-0.28867 0.540064 -0.22949 -0.756528
+-0.323374 0.520028 -0.17952 -0.769917
+-0.356693 0.497765 -0.12878 -0.78001
+-0.388485 0.47337 -0.0774894 -0.786763
+-0.418613 0.446949 -0.0258666 -0.790146
+-0.446949 0.418613 0.0258667 -0.790146
+-0.47337 0.388485 0.0774894 -0.786763
+-0.497765 0.356693 0.12878 -0.78001
+-0.520028 0.323374 0.17952 -0.769917
+-0.540064 0.28867 0.22949 -0.756528
+-0.557788 0.25273 0.278478 -0.739899
+-0.573123 0.215708 0.326274 -0.720101
+-0.586004 0.177762 0.372672 -0.69722
+-0.596375 0.139055 0.417474 -0.671353
+-0.604193 0.0997527 0.460489 -0.642612
+-0.609424 0.060023 0.501532 -0.611118
+-0.612045 0.0200363 0.540427 -0.577008
+0.645152 0.0211201 -0.605934 0.464949
+0.642389 0.0632698 -0.635045 0.424324
+0.636876 0.105149 -0.661438 0.381881
+0.628635 0.146577 -0.684998 0.337804
+0.617702 0.187378 -0.705625 0.292279
+0.604125 0.227376 -0.72323 0.245503
+0.58796 0.266401 -0.737738 0.197676
+0.569278 0.304285 -0.749087 0.149003
+0.548158 0.340866 -0.757229 0.099691
+0.52469 0.375988 -0.762127 0.0499525
+0.498976 0.409499 -0.763763 1.38464e-08
+0.471125 0.441257 -0.762127 -0.0499525
+0.441257 0.471125 -0.757229 -0.099691
+0.409499 0.498976 -0.749087 -0.149003
+0.375988 0.52469 -0.737738 -0.197676
+0.340866 0.548158 -0.72323 -0.245504
+0.304285 0.569278 -0.705625 -0.292279
+0.266401 0.58796 -0.684998 -0.337804
+0.227376 0.604125 -0.661438 -0.381881
+0.187378 0.617702 -0.635045 -0.424324
+0.146577 0.628635 -0.605934 -0.464949
+0.105148 0.636876 -0.574227 -0.503584
+0.0632697 0.642389 -0.540062 -0.540062
+0.02112 0.645152 -0.503584 -0.574227
+-0.0211201 0.645152 -0.464949 -0.605934
+-0.0632698 0.642389 -0.424324 -0.635045
+-0.105149 0.636876 -0.381881 -0.661438
+-0.146577 0.628635 -0.337804 -0.684998
+-0.187378 0.617702 -0.292279 -0.705625
+-0.227377 0.604125 -0.245503 -0.72323
+-0.266401 0.58796 -0.197676 -0.737738
+-0.304285 0.569278 -0.149003 -0.749087
+-0.340866 0.548158 -0.099691 -0.757229
+-0.375988 0.52469 -0.0499524 -0.762127
+-0.409499 0.498976 -2.59851e-08 -0.763763
+-0.441257 0.471125 0.0499525 -0.762127
+-0.471125 0.441257 0.099691 -0.757229
+-0.498976 0.409499 0.149003 -0.749087
+-0.52469 0.375988 0.197676 -0.737738
+-0.548158 0.340866 0.245503 -0.72323
+-0.569278 0.304285 0.292279 -0.705625
+-0.58796 0.266401 0.337804 -0.684998
+-0.604125 0.227376 0.381881 -0.661438
+-0.617702 0.187378 0.424324 -0.635045
+-0.628635 0.146577 0.464949 -0.605934
+-0.636876 0.105149 0.503584 -0.574227
+-0.642389 0.0632698 0.540062 -0.540062
+-0.645152 0.0211201 0.574227 -0.503584
+0.645152 0.0211201 -0.503584 0.574227
+0.642389 0.0632698 -0.540062 0.540062
+0.636876 0.105149 -0.574227 0.503584
+0.628635 0.146577 -0.605934 0.464949
+0.617702 0.187378 -0.635045 0.424324
+0.604125 0.227376 -0.661438 0.381881
+0.58796 0.266401 -0.684998 0.337804
+0.569278 0.304285 -0.705625 0.292279
+0.548158 0.340866 -0.72323 0.245503
+0.52469 0.375988 -0.737738 0.197676
+0.498976 0.409499 -0.749087 0.149003
+0.471125 0.441257 -0.757229 0.099691
+0.441257 0.471125 -0.762127 0.0499525
+0.409499 0.498976 -0.763763 -1.70771e-09
+0.375988 0.52469 -0.762127 -0.0499525
+0.340866 0.548158 -0.757229 -0.0996911
+0.304285 0.569278 -0.749087 -0.149003
+0.266401 0.58796 -0.737738 -0.197676
+0.227376 0.604125 -0.72323 -0.245503
+0.187378 0.617702 -0.705625 -0.292279
+0.146577 0.628635 -0.684998 -0.337804
+0.105148 0.636876 -0.661438 -0.381881
+0.0632697 0.642389 -0.635045 -0.424324
+0.02112 0.645152 -0.605934 -0.464949
+-0.0211201 0.645152 -0.574227 -0.503584
+-0.0632698 0.642389 -0.540062 -0.540062
+-0.105149 0.636876 -0.503584 -0.574227
+-0.146577 0.628635 -0.464949 -0.605934
+-0.187378 0.617702 -0.424324 -0.635045
+-0.227377 0.604125 -0.381881 -0.661438
+-0.266401 0.58796 -0.337803 -0.684998
+-0.304285 0.569278 -0.292279 -0.705625
+-0.340866 0.548158 -0.245503 -0.72323
+-0.375988 0.52469 -0.197676 -0.737738
+-0.409499 0.498976 -0.149003 -0.749087
+-0.441257 0.471125 -0.099691 -0.757229
+-0.471125 0.441257 -0.0499525 -0.762127
+-0.498976 0.409499 3.50928e-08 -0.763763
+-0.52469 0.375988 0.0499524 -0.762127
+-0.548158 0.340866 0.099691 -0.757229
+-0.569278 0.304285 0.149003 -0.749087
+-0.58796 0.266401 0.197676 -0.737738
+-0.604125 0.227376 0.245504 -0.72323
+-0.617702 0.187378 0.292279 -0.705625
+-0.628635 0.146577 0.337804 -0.684998
+-0.636876 0.105149 0.381881 -0.661438
+-0.642389 0.0632698 0.424324 -0.635045
+-0.645152 0.0211201 0.464949 -0.605934
+0.676641 0.0221509 -0.537165 0.50311
+0.673743 0.0663579 -0.56892 0.466901
+0.667961 0.110281 -0.598239 0.428692
+0.659318 0.153731 -0.624996 0.388647
+0.647852 0.196524 -0.649076 0.346939
+0.633611 0.238474 -0.670378 0.303744
+0.616658 0.279404 -0.688808 0.259249
+0.597064 0.319137 -0.704289 0.213644
+0.574913 0.357504 -0.716754 0.167124
+0.5503 0.394339 -0.72615 0.119888
+0.523331 0.429486 -0.732436 0.0721387
+0.49412 0.462794 -0.735586 0.0240806
+0.462794 0.49412 -0.735586 -0.0240806
+0.429486 0.523331 -0.732436 -0.0721387
+0.394339 0.5503 -0.72615 -0.119888
+0.357504 0.574913 -0.716754 -0.167124
+0.319137 0.597064 -0.704289 -0.213644
+0.279404 0.616658 -0.688808 -0.259249
+0.238474 0.633611 -0.670378 -0.303744
+0.196524 0.647852 -0.649076 -0.346939
+0.153731 0.659318 -0.624996 -0.388647
+0.110281 0.667961 -0.598239 -0.428692
+0.0663578 0.673743 -0.56892 -0.466901
+0.0221508 0.676641 -0.537165 -0.50311
+-0.022151 0.676641 -0.50311 -0.537165
+-0.066358 0.673743 -0.466901 -0.56892
+-0.110281 0.667961 -0.428692 -0.598239
+-0.153731 0.659318 -0.388647 -0.624996
+-0.196524 0.647852 -0.346938 -0.649077
+-0.238475 0.633611 -0.303744 -0.670378
+-0.279404 0.616658 -0.259249 -0.688808
+-0.319137 0.597064 -0.213644 -0.704289
+-0.357504 0.574913 -0.167124 -0.716754
+-0.394339 0.5503 -0.119888 -0.72615
+-0.429486 0.523331 -0.0721387 -0.732436
+-0.462794 0.49412 -0.0240805 -0.735586
+-0.49412 0.462794 0.0240805 -0.735586
+-0.523331 0.429486 0.0721387 -0.732436
+-0.5503 0.394339 0.119888 -0.72615
+-0.574913 0.357504 0.167124 -0.716754
+-0.597063 0.319137 0.213644 -0.704289
+-0.616658 0.279404 0.259249 -0.688808
+-0.633611 0.238474 0.303744 -0.670378
+-0.647852 0.196524 0.346939 -0.649076
+-0.659318 0.153731 0.388647 -0.624996
+-0.667961 0.110281 0.428692 -0.598239
+-0.673743 0.0663579 0.466901 -0.56892
+-0.676641 0.022151 0.50311 -0.537165
+0.0510037 0.00166969 -0.682702 -0.728913
+0.0507853 0.00500192 -0.633567 -0.772003
+0.0503494 0.00831272 -0.581719 -0.811788
+0.049698 0.0115879 -0.52738 -0.848096
+0.0488337 0.0148135 -0.470783 -0.880772
+0.0477602 0.0179757 -0.412169 -0.909677
+0.0464823 0.0210609 -0.351791 -0.934687
+0.0450054 0.0240559 -0.289906 -0.955694
+0.0433357 0.0269479 -0.22678 -0.972608
+0.0414804 0.0297244 -0.162683 -0.985358
+0.0394475 0.0323737 -0.0978894 -0.993888
+0.0372457 0.0348844 -0.0326764 -0.998162
+0.0348844 0.0372457 0.0326765 -0.998162
+0.0323737 0.0394475 0.0978894 -0.993888
+0.0297244 0.0414804 0.162683 -0.985358
+0.0269478 0.0433357 0.22678 -0.972608
+0.0240559 0.0450054 0.289907 -0.955693
+0.0210609 0.0464823 0.351791 -0.934686
+0.0179757 0.0477603 0.412169 -0.909677
+0.0148135 0.0488337 0.470783 -0.880772
+0.0115879 0.049698 0.52738 -0.848096
+0.00831272 0.0503494 0.581719 -0.811788
+0.00500191 0.0507853 0.633567 -0.772003
+0.00166968 0.0510037 0.682702 -0.728913
+-0.00166969 0.0510037 0.728913 -0.682702
+-0.00500192 0.0507853 0.772003 -0.633567
+-0.00831273 0.0503494 0.811788 -0.581719
+-0.0115879 0.049698 0.848096 -0.52738
+-0.0148135 0.0488337 0.880772 -0.470782
+-0.0179757 0.0477602 0.909677 -0.412169
+-0.0210609 0.0464823 0.934687 -0.351791
+-0.0240559 0.0450054 0.955693 -0.289907
+-0.0269478 0.0433357 0.972608 -0.22678
+-0.0297244 0.0414804 0.985358 -0.162683
+-0.0323737 0.0394475 0.993888 -0.0978895
+-0.0348844 0.0372457 0.998162 -0.0326764
+-0.0372457 0.0348844 0.998162 0.0326764
+-0.0394475 0.0323737 0.993888 0.0978895
+-0.0414804 0.0297244 0.985358 0.162683
+-0.0433357 0.0269478 0.972608 0.22678
+-0.0450054 0.0240559 0.955694 0.289906
+-0.0464823 0.0210609 0.934687 0.351791
+-0.0477603 0.0179757 0.909677 0.412169
+-0.0488337 0.0148135 0.880772 0.470783
+-0.049698 0.0115879 0.848096 0.52738
+-0.0503494 0.00831273 0.811788 0.581719
+-0.0507853 0.00500191 0.772003 0.633567
+-0.0510037 0.00166969 0.728913 0.682702
+0.102007 0.00333938 -0.350411 -0.931019
+0.101571 0.0100038 -0.288769 -0.951943
+0.100699 0.0166254 -0.22589 -0.968791
+0.0993959 0.0231759 -0.162045 -0.981491
+0.0976673 0.0296271 -0.0975053 -0.989988
+0.0955205 0.0359514 -0.0325482 -0.994245
+0.0929646 0.0421217 0.0325482 -0.994245
+0.0900107 0.0481117 0.0975053 -0.989988
+0.0866713 0.0538957 0.162045 -0.981491
+0.0829608 0.0594489 0.22589 -0.968791
+0.0788951 0.0647475 0.288769 -0.951943
+0.0744914 0.0697688 0.350411 -0.931019
+0.0697688 0.0744914 0.410552 -0.906107
+0.0647475 0.078895 0.468935 -0.877316
+0.0594489 0.0829608 0.52531 -0.844768
+0.0538957 0.0866713 0.579436 -0.808602
+0.0481117 0.0900107 0.631081 -0.768974
+0.0421217 0.0929647 0.680023 -0.726053
+0.0359514 0.0955205 0.726053 -0.680023
+0.0296271 0.0976673 0.768974 -0.63108
+0.0231759 0.0993959 0.808602 -0.579436
+0.0166254 0.100699 0.844768 -0.52531
+0.0100038 0.101571 0.877316 -0.468935
+0.00333937 0.102007 0.906107 -0.410552
+-0.00333939 0.102007 0.931019 -0.350411
+-0.0100038 0.101571 0.951943 -0.288769
+-0.0166255 0.100699 0.968791 -0.22589
+-0.0231759 0.0993959 0.981491 -0.162045
+-0.0296271 0.0976673 0.989988 -0.0975051
+-0.0359514 0.0955205 0.994245 -0.0325481
+-0.0421217 0.0929646 0.994245 0.0325484
+-0.0481117 0.0900107 0.989988 0.0975052
+-0.0538957 0.0866713 0.981491 0.162045
+-0.0594489 0.0829608 0.968791 0.225891
+-0.0647475 0.0788951 0.951943 0.288769
+-0.0697689 0.0744914 0.931019 0.350411
+-0.0744914 0.0697689 0.906107 0.410552
+-0.0788951 0.0647475 0.877316 0.468935
+-0.0829608 0.0594489 0.844768 0.52531
+-0.0866713 0.0538957 0.808602 0.579436
+-0.0900107 0.0481117 0.768974 0.63108
+-0.0929646 0.0421217 0.726053 0.680023
+-0.0955205 0.0359514 0.680023 0.726053
+-0.0976673 0.0296271 0.631081 0.768974
+-0.0993959 0.0231759 0.579436 0.808602
+-0.100699 0.0166255 0.52531 0.844768
+-0.101571 0.0100038 0.468935 0.877316
+-0.102007 0.00333939 0.410552 0.906107
+0.102007 0.00333938 -0.906107 -0.410552
+0.101571 0.0100038 -0.877316 -0.468935
+0.100699 0.0166254 -0.844768 -0.52531
+0.0993959 0.0231759 -0.808602 -0.579436
+0.0976673 0.0296271 -0.768974 -0.631081
+0.0955205 0.0359514 -0.726053 -0.680023
+0.0929646 0.0421217 -0.680023 -0.726053
+0.0900107 0.0481117 -0.63108 -0.768974
+0.0866713 0.0538957 -0.579436 -0.808602
+0.0829608 0.0594489 -0.52531 -0.844768
+0.0788951 0.0647475 -0.468935 -0.877316
+0.0744914 0.0697688 -0.410552 -0.906107
+0.0697688 0.0744914 -0.350411 -0.931019
+0.0647475 0.078895 -0.288769 -0.951943
+0.0594489 0.0829608 -0.22589 -0.968791
+0.0538957 0.0866713 -0.162045 -0.981491
+0.0481117 0.0900107 -0.0975052 -0.989988
+0.0421217 0.0929647 -0.0325482 -0.994245
+0.0359514 0.0955205 0.0325483 -0.994245
+0.0296271 0.0976673 0.0975053 -0.989988
+0.0231759 0.0993959 0.162045 -0.981491
+0.0166254 0.100699 0.225891 -0.968791
+0.0100038 0.101571 0.288769 -0.951943
+0.00333937 0.102007 0.350411 -0.931019
+-0.00333939 0.102007 0.410552 -0.906107
+-0.0100038 0.101571 0.468935 -0.877316
+-0.0166255 0.100699 0.52531 -0.844768
+-0.0231759 0.0993959 0.579436 -0.808602
+-0.0296271 0.0976673 0.631081 -0.768974
+-0.0359514 0.0955205 0.680023 -0.726053
+-0.0421217 0.0929646 0.726053 -0.680023
+-0.0481117 0.0900107 0.768974 -0.631081
+-0.0538957 0.0866713 0.808602 -0.579436
+-0.0594489 0.0829608 0.844768 -0.52531
+-0.0647475 0.0788951 0.877316 -0.468935
+-0.0697689 0.0744914 0.906107 -0.410552
+-0.0744914 0.0697689 0.931019 -0.350411
+-0.0788951 0.0647475 0.951943 -0.288769
+-0.0829608 0.0594489 0.968791 -0.225891
+-0.0866713 0.0538957 0.981491 -0.162045
+-0.0900107 0.0481117 0.989988 -0.0975054
+-0.0929646 0.0421217 0.994245 -0.0325482
+-0.0955205 0.0359514 0.994245 0.0325483
+-0.0976673 0.0296271 0.989988 0.0975053
+-0.0993959 0.0231759 0.981491 0.162045
+-0.100699 0.0166255 0.968791 0.22589
+-0.101571 0.0100038 0.951943 0.288769
+-0.102007 0.00333939 0.931019 0.350411
+0.153011 0.00500907 -0.675534 -0.72126
+0.152356 0.0150057 -0.626915 -0.763898
+0.151048 0.0249382 -0.575611 -0.803265
+0.149094 0.0347638 -0.521843 -0.839192
+0.146501 0.0444406 -0.46584 -0.871525
+0.143281 0.0539271 -0.407842 -0.900126
+0.139447 0.0631826 -0.348098 -0.924873
+0.135016 0.0721676 -0.286863 -0.94566
+0.130007 0.0808436 -0.224399 -0.962397
+0.124441 0.0891733 -0.160975 -0.975013
+0.118343 0.0971212 -0.0968617 -0.983453
+0.111737 0.104653 -0.0323334 -0.987683
+0.104653 0.111737 0.0323334 -0.987683
+0.0971212 0.118343 0.0968617 -0.983453
+0.0891733 0.124441 0.160975 -0.975013
+0.0808435 0.130007 0.2244 -0.962397
+0.0721676 0.135016 0.286863 -0.94566
+0.0631826 0.139447 0.348098 -0.924873
+0.053927 0.143281 0.407842 -0.900126
+0.0444406 0.146501 0.46584 -0.871525
+0.0347638 0.149094 0.521843 -0.839192
+0.0249382 0.151048 0.575611 -0.803265
+0.0150057 0.152356 0.626915 -0.763898
+0.00500905 0.153011 0.675534 -0.72126
+-0.00500908 0.153011 0.72126 -0.675534
+-0.0150058 0.152356 0.763898 -0.626915
+-0.0249382 0.151048 0.803265 -0.575611
+-0.0347638 0.149094 0.839192 -0.521843
+-0.0444406 0.146501 0.871525 -0.46584
+-0.0539271 0.143281 0.900126 -0.407842
+-0.0631826 0.139447 0.924873 -0.348098
+-0.0721676 0.135016 0.94566 -0.286863
+-0.0808435 0.130007 0.962397 -0.224399
+-0.0891733 0.124441 0.975013 -0.160975
+-0.0971212 0.118343 0.983453 -0.0968617
+-0.104653 0.111737 0.987683 -0.0323333
+-0.111737 0.104653 0.987683 0.0323333
+-0.118343 0.0971212 0.983453 0.0968617
+-0.124441 0.0891733 0.975013 0.160975
+-0.130007 0.0808435 0.962397 0.224399
+-0.135016 0.0721676 0.94566 0.286863
+-0.139447 0.0631826 0.924873 0.348098
+-0.143281 0.053927 0.900126 0.407842
+-0.146501 0.0444406 0.871525 0.46584
+-0.149094 0.0347638 0.839192 0.521843
+-0.151048 0.0249382 0.803265 0.575611
+-0.152356 0.0150057 0.763898 0.626915
+-0.153011 0.00500908 0.72126 0.675534
+0.153011 0.00500907 -0.224399 -0.962397
+0.152356 0.0150057 -0.160975 -0.975013
+0.151048 0.0249382 -0.0968617 -0.983453
+0.149094 0.0347638 -0.0323334 -0.987683
+0.146501 0.0444406 0.0323334 -0.987683
+0.143281 0.0539271 0.0968617 -0.983453
+0.139447 0.0631826 0.160975 -0.975013
+0.135016 0.0721676 0.224399 -0.962397
+0.130007 0.0808436 0.286863 -0.94566
+0.124441 0.0891733 0.348098 -0.924873
+0.118343 0.0971212 0.407842 -0.900126
+0.111737 0.104653 0.46584 -0.871525
+0.104653 0.111737 0.521843 -0.839192
+0.0971212 0.118343 0.575611 -0.803265
+0.0891733 0.124441 0.626915 -0.763898
+0.0808435 0.130007 0.675534 -0.72126
+0.0721676 0.135016 0.72126 -0.675534
+0.0631826 0.139447 0.763898 -0.626915
+0.053927 0.143281 0.803265 -0.575611
+0.0444406 0.146501 0.839192 -0.521843
+0.0347638 0.149094 0.871525 -0.46584
+0.0249382 0.151048 0.900126 -0.407842
+0.0150057 0.152356 0.924873 -0.348098
+0.00500905 0.153011 0.94566 -0.286863
+-0.00500908 0.153011 0.962397 -0.224399
+-0.0150058 0.152356 0.975013 -0.160975
+-0.0249382 0.151048 0.983453 -0.0968616
+-0.0347638 0.149094 0.987683 -0.0323333
+-0.0444406 0.146501 0.987683 0.0323335
+-0.0539271 0.143281 0.983453 0.0968618
+-0.0631826 0.139447 0.975013 0.160975
+-0.0721676 0.135016 0.962397 0.224399
+-0.0808435 0.130007 0.94566 0.286863
+-0.0891733 0.124441 0.924873 0.348098
+-0.0971212 0.118343 0.900126 0.407842
+-0.104653 0.111737 0.871525 0.46584
+-0.111737 0.104653 0.839192 0.521843
+-0.118343 0.0971212 0.803265 0.575611
+-0.124441 0.0891733 0.763898 0.626915
+-0.130007 0.0808435 0.72126 0.675534
+-0.135016 0.0721676 0.675534 0.72126
+-0.139447 0.0631826 0.626915 0.763898
+-0.143281 0.053927 0.575611 0.803265
+-0.146501 0.0444406 0.521843 0.839192
+-0.149094 0.0347638 0.46584 0.871525
+-0.151048 0.0249382 0.407842 0.900126
+-0.152356 0.0150057 0.348098 0.924873
+-0.153011 0.00500908 0.286863 0.94566
+0.204015 0.00667875 -0.159466 -0.96587
+0.203141 0.0200077 -0.0959534 -0.974231
+0.201398 0.0332509 -0.0320302 -0.978421
+0.198792 0.0463518 0.0320302 -0.978421
+0.195335 0.0592541 0.0959534 -0.974231
+0.191041 0.0719027 0.159466 -0.96587
+0.185929 0.0842435 0.222295 -0.953372
+0.180021 0.0962235 0.284173 -0.936792
+0.173343 0.107791 0.344833 -0.9162
+0.165922 0.118898 0.404017 -0.891686
+0.15779 0.129495 0.461471 -0.863352
+0.148983 0.139538 0.516949 -0.831322
+0.139538 0.148983 0.570214 -0.795732
+0.129495 0.15779 0.621036 -0.756735
+0.118898 0.165922 0.669199 -0.714497
+0.107791 0.173343 0.714497 -0.669199
+0.0962234 0.180021 0.756735 -0.621036
+0.0842435 0.185929 0.795732 -0.570214
+0.0719027 0.191041 0.831322 -0.516949
+0.0592541 0.195335 0.863352 -0.461471
+0.0463517 0.198792 0.891686 -0.404017
+0.0332509 0.201398 0.9162 -0.344833
+0.0200076 0.203141 0.936792 -0.284173
+0.00667873 0.204015 0.953372 -0.222295
+-0.00667877 0.204015 0.96587 -0.159466
+-0.0200077 0.203141 0.974231 -0.0959533
+-0.0332509 0.201398 0.978421 -0.0320301
+-0.0463518 0.198792 0.978421 0.0320302
+-0.0592541 0.195335 0.974231 0.0959535
+-0.0719028 0.191041 0.96587 0.159466
+-0.0842435 0.185929 0.953372 0.222295
+-0.0962234 0.180021 0.936792 0.284173
+-0.107791 0.173343 0.9162 0.344833
+-0.118898 0.165922 0.891686 0.404018
+-0.129495 0.15779 0.863352 0.461471
+-0.139538 0.148983 0.831322 0.516949
+-0.148983 0.139538 0.795732 0.570214
+-0.15779 0.129495 0.756735 0.621036
+-0.165922 0.118898 0.714497 0.669199
+-0.173343 0.107791 0.669199 0.714497
+-0.180021 0.0962235 0.621036 0.756735
+-0.185929 0.0842435 0.570214 0.795732
+-0.191041 0.0719027 0.516949 0.831322
+-0.195335 0.0592541 0.461472 0.863352
+-0.198792 0.0463517 0.404017 0.891686
+-0.201398 0.0332509 0.344833 0.9162
+-0.203141 0.0200077 0.284173 0.936792
+-0.204015 0.00667877 0.222295 0.953372
+0.204015 0.00667875 -0.516949 -0.831322
+0.203141 0.0200077 -0.461471 -0.863352
+0.201398 0.0332509 -0.404017 -0.891686
+0.198792 0.0463518 -0.344833 -0.9162
+0.195335 0.0592541 -0.284173 -0.936792
+0.191041 0.0719027 -0.222295 -0.953372
+0.185929 0.0842435 -0.159466 -0.96587
+0.180021 0.0962235 -0.0959534 -0.974231
+0.173343 0.107791 -0.0320302 -0.978421
+0.165922 0.118898 0.0320302 -0.978421
+0.15779 0.129495 0.0959534 -0.974231
+0.148983 0.139538 0.159466 -0.96587
+0.139538 0.148983 0.222295 -0.953372
+0.129495 0.15779 0.284173 -0.936792
+0.118898 0.165922 0.344833 -0.9162
+0.107791 0.173343 0.404018 -0.891686
+0.0962234 0.180021 0.461472 -0.863352
+0.0842435 0.185929 0.516949 -0.831322
+0.0719027 0.191041 0.570214 -0.795732
+0.0592541 0.195335 0.621036 -0.756735
+0.0463517 0.198792 0.669199 -0.714497
+0.0332509 0.201398 0.714497 -0.669199
+0.0200076 0.203141 0.756735 -0.621036
+0.00667873 0.204015 0.795732 -0.570214
+-0.00667877 0.204015 0.831322 -0.516949
+-0.0200077 0.203141 0.863352 -0.461471
+-0.0332509 0.201398 0.891686 -0.404017
+-0.0463518 0.198792 0.9162 -0.344833
+-0.0592541 0.195335 0.936792 -0.284173
+-0.0719028 0.191041 0.953372 -0.222295
+-0.0842435 0.185929 0.96587 -0.159466
+-0.0962234 0.180021 0.974231 -0.0959535
+-0.107791 0.173343 0.978421 -0.0320302
+-0.118898 0.165922 0.978421 0.0320303
+-0.129495 0.15779 0.974231 0.0959534
+-0.139538 0.148983 0.96587 0.159466
+-0.148983 0.139538 0.953372 0.222295
+-0.15779 0.129495 0.936792 0.284173
+-0.165922 0.118898 0.9162 0.344833
+-0.173343 0.107791 0.891686 0.404018
+-0.180021 0.0962235 0.863352 0.461471
+-0.185929 0.0842435 0.831322 0.516949
+-0.191041 0.0719027 0.795732 0.570214
+-0.195335 0.0592541 0.756735 0.621036
+-0.198792 0.0463517 0.714497 0.669199
+-0.201398 0.0332509 0.669199 0.714497
+-0.203141 0.0200077 0.621036 0.756735
+-0.204015 0.00667877 0.570214 0.795732
+0.255019 0.00834844 -0.41054 -0.875416
+0.253927 0.0250096 -0.352407 -0.900392
+0.251747 0.0415636 -0.292764 -0.921513
+0.24849 0.0579397 -0.231867 -0.938687
+0.244168 0.0740676 -0.169977 -0.951842
+0.238801 0.0898784 -0.10736 -0.960921
+0.232412 0.105304 -0.0442829 -0.965886
+0.225027 0.120279 0.0189838 -0.966714
+0.216678 0.134739 0.0821693 -0.963402
+0.207402 0.148622 0.145003 -0.955965
+0.197238 0.161869 0.207216 -0.944435
+0.186229 0.174422 0.268541 -0.92886
+0.174422 0.186229 0.328716 -0.909308
+0.161869 0.197238 0.387484 -0.885862
+0.148622 0.207402 0.444593 -0.858623
+0.134739 0.216678 0.499797 -0.827707
+0.120279 0.225027 0.552862 -0.793246
+0.105304 0.232412 0.603559 -0.755389
+0.0898784 0.238801 0.651671 -0.714297
+0.0740676 0.244168 0.696993 -0.670146
+0.0579397 0.24849 0.739331 -0.623126
+0.0415636 0.251747 0.778502 -0.573437
+0.0250096 0.253927 0.81434 -0.521293
+0.00834842 0.255019 0.846691 -0.466916
+-0.00834847 0.255019 0.875416 -0.41054
+-0.0250096 0.253927 0.900392 -0.352406
+-0.0415636 0.251747 0.921513 -0.292764
+-0.0579397 0.24849 0.938687 -0.231867
+-0.0740677 0.244168 0.951842 -0.169977
+-0.0898785 0.238801 0.960921 -0.10736
+-0.105304 0.232412 0.965886 -0.0442828
+-0.120279 0.225027 0.966714 0.0189837
+-0.134739 0.216678 0.963402 0.0821693
+-0.148622 0.207402 0.955965 0.145003
+-0.161869 0.197238 0.944435 0.207216
+-0.174422 0.186229 0.92886 0.268541
+-0.186229 0.174422 0.909308 0.328716
+-0.197238 0.161869 0.885862 0.387484
+-0.207402 0.148622 0.858623 0.444593
+-0.216678 0.134739 0.827707 0.499797
+-0.225027 0.120279 0.793246 0.552862
+-0.232412 0.105304 0.755389 0.603559
+-0.238801 0.0898784 0.714297 0.651672
+-0.244168 0.0740676 0.670146 0.696993
+-0.24849 0.0579397 0.623126 0.739331
+-0.251747 0.0415636 0.573437 0.778502
+-0.253927 0.0250096 0.521293 0.81434
+-0.255019 0.00834847 0.466916 0.846691
+0.153011 0.00500907 -0.94566 -0.286863
+0.152356 0.0150057 -0.924873 -0.348098
+0.151048 0.0249382 -0.900126 -0.407842
+0.149094 0.0347638 -0.871525 -0.46584
+0.146501 0.0444406 -0.839192 -0.521843
+0.143281 0.0539271 -0.803265 -0.575611
+0.139447 0.0631826 -0.763898 -0.626915
+0.135016 0.0721676 -0.72126 -0.675534
+0.130007 0.0808436 -0.675534 -0.72126
+0.124441 0.0891733 -0.626915 -0.763898
+0.118343 0.0971212 -0.575611 -0.803265
+0.111737 0.104653 -0.521843 -0.839192
+0.104653 0.111737 -0.46584 -0.871525
+0.0971212 0.118343 -0.407842 -0.900126
+0.0891733 0.124441 -0.348098 -0.924873
+0.0808435 0.130007 -0.286863 -0.94566
+0.0721676 0.135016 -0.224399 -0.962397
+0.0631826 0.139447 -0.160975 -0.975013
+0.053927 0.143281 -0.0968616 -0.983453
+0.0444406 0.146501 -0.0323333 -0.987683
+0.0347638 0.149094 0.0323334 -0.987683
+0.0249382 0.151048 0.0968618 -0.983453
+0.0150057 0.152356 0.160975 -0.975013
+0.00500905 0.153011 0.2244 -0.962397
+-0.00500908 0.153011 0.286863 -0.94566
+-0.0150058 0.152356 0.348098 -0.924873
+-0.0249382 0.151048 0.407842 -0.900126
+-0.0347638 0.149094 0.46584 -0.871525
+-0.0444406 0.146501 0.521843 -0.839192
+-0.0539271 0.143281 0.575611 -0.803265
+-0.0631826 0.139447 0.626915 -0.763898
+-0.0721676 0.135016 0.675534 -0.72126
+-0.0808435 0.130007 0.72126 -0.675534
+-0.0891733 0.124441 0.763898 -0.626915
+-0.0971212 0.118343 0.803265 -0.575611
+-0.104653 0.111737 0.839192 -0.521843
+-0.111737 0.104653 0.871525 -0.46584
+-0.118343 0.0971212 0.900126 -0.407842
+-0.124441 0.0891733 0.924873 -0.348098
+-0.130007 0.0808435 0.94566 -0.286863
+-0.135016 0.0721676 0.962397 -0.2244
+-0.139447 0.0631826 0.975013 -0.160975
+-0.143281 0.053927 0.983453 -0.0968616
+-0.146501 0.0444406 0.987683 -0.0323334
+-0.149094 0.0347638 0.987683 0.0323335
+-0.151048 0.0249382 0.983453 0.0968616
+-0.152356 0.0150057 0.975013 0.160975
+-0.153011 0.00500908 0.962397 0.224399
+0.204015 0.00667875 -0.795732 -0.570214
+0.203141 0.0200077 -0.756735 -0.621036
+0.201398 0.0332509 -0.714497 -0.669199
+0.198792 0.0463518 -0.669199 -0.714497
+0.195335 0.0592541 -0.621036 -0.756735
+0.191041 0.0719027 -0.570214 -0.795732
+0.185929 0.0842435 -0.516949 -0.831322
+0.180021 0.0962235 -0.461471 -0.863352
+0.173343 0.107791 -0.404017 -0.891686
+0.165922 0.118898 -0.344833 -0.9162
+0.15779 0.129495 -0.284173 -0.936792
+0.148983 0.139538 -0.222295 -0.953372
+0.139538 0.148983 -0.159466 -0.96587
+0.129495 0.15779 -0.0959534 -0.974231
+0.118898 0.165922 -0.0320301 -0.978421
+0.107791 0.173343 0.0320303 -0.978421
+0.0962234 0.180021 0.0959535 -0.974231
+0.0842435 0.185929 0.159466 -0.96587
+0.0719027 0.191041 0.222295 -0.953372
+0.0592541 0.195335 0.284173 -0.936792
+0.0463517 0.198792 0.344833 -0.9162
+0.0332509 0.201398 0.404018 -0.891686
+0.0200076 0.203141 0.461472 -0.863352
+0.00667873 0.204015 0.516949 -0.831322
+-0.00667877 0.204015 0.570214 -0.795732
+-0.0200077 0.203141 0.621036 -0.756735
+-0.0332509 0.201398 0.669199 -0.714497
+-0.0463518 0.198792 0.714497 -0.669199
+-0.0592541 0.195335 0.756735 -0.621036
+-0.0719028 0.191041 0.795732 -0.570214
+-0.0842435 0.185929 0.831322 -0.516949
+-0.0962234 0.180021 0.863352 -0.461472
+-0.107791 0.173343 0.891686 -0.404017
+-0.118898 0.165922 0.9162 -0.344833
+-0.129495 0.15779 0.936792 -0.284173
+-0.139538 0.148983 0.953372 -0.222295
+-0.148983 0.139538 0.96587 -0.159466
+-0.15779 0.129495 0.974231 -0.0959533
+-0.165922 0.118898 0.978421 -0.0320303
+-0.173343 0.107791 0.978421 0.0320302
+-0.180021 0.0962235 0.974231 0.0959533
+-0.185929 0.0842435 0.96587 0.159466
+-0.191041 0.0719027 0.953372 0.222295
+-0.195335 0.0592541 0.936792 0.284173
+-0.198792 0.0463517 0.9162 0.344833
+-0.201398 0.0332509 0.891686 0.404017
+-0.203141 0.0200077 0.863352 0.461472
+-0.204015 0.00667877 0.831322 0.516949
+0.204015 0.00667875 -0.953372 -0.222295
+0.203141 0.0200077 -0.936792 -0.284173
+0.201398 0.0332509 -0.9162 -0.344833
+0.198792 0.0463518 -0.891686 -0.404017
+0.195335 0.0592541 -0.863352 -0.461471
+0.191041 0.0719027 -0.831322 -0.516949
+0.185929 0.0842435 -0.795732 -0.570214
+0.180021 0.0962235 -0.756735 -0.621036
+0.173343 0.107791 -0.714497 -0.669199
+0.165922 0.118898 -0.669199 -0.714497
+0.15779 0.129495 -0.621036 -0.756735
+0.148983 0.139538 -0.570214 -0.795732
+0.139538 0.148983 -0.516949 -0.831322
+0.129495 0.15779 -0.461471 -0.863352
+0.118898 0.165922 -0.404017 -0.891686
+0.107791 0.173343 -0.344833 -0.9162
+0.0962234 0.180021 -0.284173 -0.936792
+0.0842435 0.185929 -0.222295 -0.953372
+0.0719027 0.191041 -0.159466 -0.96587
+0.0592541 0.195335 -0.0959533 -0.974231
+0.0463517 0.198792 -0.0320302 -0.978421
+0.0332509 0.201398 0.0320303 -0.978421
+0.0200076 0.203141 0.0959535 -0.974231
+0.00667873 0.204015 0.159466 -0.96587
+-0.00667877 0.204015 0.222295 -0.953372
+-0.0200077 0.203141 0.284173 -0.936792
+-0.0332509 0.201398 0.344833 -0.9162
+-0.0463518 0.198792 0.404018 -0.891686
+-0.0592541 0.195335 0.461472 -0.863352
+-0.0719028 0.191041 0.51695 -0.831322
+-0.0842435 0.185929 0.570214 -0.795732
+-0.0962234 0.180021 0.621036 -0.756735
+-0.107791 0.173343 0.669199 -0.714497
+-0.118898 0.165922 0.714497 -0.669199
+-0.129495 0.15779 0.756735 -0.621036
+-0.139538 0.148983 0.795732 -0.570214
+-0.148983 0.139538 0.831322 -0.516949
+-0.15779 0.129495 0.863352 -0.461471
+-0.165922 0.118898 0.891686 -0.404018
+-0.173343 0.107791 0.9162 -0.344833
+-0.180021 0.0962235 0.936792 -0.284173
+-0.185929 0.0842435 0.953372 -0.222295
+-0.191041 0.0719027 0.96587 -0.159466
+-0.195335 0.0592541 0.974231 -0.0959534
+-0.198792 0.0463517 0.978421 -0.0320301
+-0.201398 0.0332509 0.978421 0.0320301
+-0.203141 0.0200077 0.974231 0.0959534
+-0.204015 0.00667877 0.96587 0.159466
+0.255019 0.00834844 -0.846691 -0.466916
+0.253927 0.0250096 -0.81434 -0.521293
+0.251747 0.0415636 -0.778502 -0.573437
+0.24849 0.0579397 -0.739331 -0.623126
+0.244168 0.0740676 -0.696993 -0.670146
+0.238801 0.0898784 -0.651671 -0.714297
+0.232412 0.105304 -0.603559 -0.755389
+0.225027 0.120279 -0.552862 -0.793246
+0.216678 0.134739 -0.499797 -0.827707
+0.207402 0.148622 -0.444593 -0.858623
+0.197238 0.161869 -0.387484 -0.885862
+0.186229 0.174422 -0.328716 -0.909308
+0.174422 0.186229 -0.268541 -0.92886
+0.161869 0.197238 -0.207216 -0.944435
+0.148622 0.207402 -0.145003 -0.955965
+0.134739 0.216678 -0.0821692 -0.963402
+0.120279 0.225027 -0.0189837 -0.966714
+0.105304 0.232412 0.044283 -0.965886
+0.0898784 0.238801 0.10736 -0.960921
+0.0740676 0.244168 0.169977 -0.951842
+0.0579397 0.24849 0.231867 -0.938687
+0.0415636 0.251747 0.292764 -0.921512
+0.0250096 0.253927 0.352407 -0.900392
+0.00834842 0.255019 0.410541 -0.875415
+-0.00834847 0.255019 0.466916 -0.846691
+-0.0250096 0.253927 0.521293 -0.81434
+-0.0415636 0.251747 0.573437 -0.778502
+-0.0579397 0.24849 0.623126 -0.739331
+-0.0740677 0.244168 0.670146 -0.696993
+-0.0898785 0.238801 0.714297 -0.651671
+-0.105304 0.232412 0.755389 -0.603559
+-0.120279 0.225027 0.793246 -0.552862
+-0.134739 0.216678 0.827707 -0.499797
+-0.148622 0.207402 0.858623 -0.444593
+-0.161869 0.197238 0.885862 -0.387484
+-0.174422 0.186229 0.909308 -0.328716
+-0.186229 0.174422 0.92886 -0.268541
+-0.197238 0.161869 0.944435 -0.207216
+-0.207402 0.148622 0.955965 -0.145003
+-0.216678 0.134739 0.963402 -0.0821693
+-0.225027 0.120279 0.966714 -0.0189839
+-0.232412 0.105304 0.965886 0.0442829
+-0.238801 0.0898784 0.960921 0.10736
+-0.244168 0.0740676 0.951842 0.169977
+-0.24849 0.0579397 0.938687 0.231867
+-0.251747 0.0415636 0.921513 0.292764
+-0.253927 0.0250096 0.900392 0.352407
+-0.255019 0.00834847 0.875416 0.41054
+0.255019 0.00834844 -0.660965 -0.705706
+0.253927 0.0250096 -0.613395 -0.747424
+0.251747 0.0415636 -0.563198 -0.785942
+0.24849 0.0579397 -0.510589 -0.821094
+0.244168 0.0740676 -0.455794 -0.85273
+0.238801 0.0898784 -0.399046 -0.880714
+0.232412 0.105304 -0.340591 -0.904928
+0.225027 0.120279 -0.280676 -0.925266
+0.216678 0.134739 -0.21956 -0.941642
+0.207402 0.148622 -0.157504 -0.953986
+0.197238 0.161869 -0.0947728 -0.962244
+0.186229 0.174422 -0.0316361 -0.966382
+0.174422 0.186229 0.0316361 -0.966382
+0.161869 0.197238 0.0947728 -0.962244
+0.148622 0.207402 0.157504 -0.953986
+0.134739 0.216678 0.21956 -0.941642
+0.120279 0.225027 0.280676 -0.925266
+0.105304 0.232412 0.340591 -0.904928
+0.0898784 0.238801 0.399047 -0.880714
+0.0740676 0.244168 0.455794 -0.85273
+0.0579397 0.24849 0.510589 -0.821094
+0.0415636 0.251747 0.563198 -0.785941
+0.0250096 0.253927 0.613395 -0.747424
+0.00834842 0.255019 0.660966 -0.705706
+-0.00834847 0.255019 0.705706 -0.660965
+-0.0250096 0.253927 0.747424 -0.613395
+-0.0415636 0.251747 0.785942 -0.563198
+-0.0579397 0.24849 0.821094 -0.510589
+-0.0740677 0.244168 0.85273 -0.455793
+-0.0898785 0.238801 0.880714 -0.399046
+-0.105304 0.232412 0.904928 -0.34059
+-0.120279 0.225027 0.925266 -0.280676
+-0.134739 0.216678 0.941642 -0.21956
+-0.148622 0.207402 0.953986 -0.157504
+-0.161869 0.197238 0.962244 -0.0947728
+-0.174422 0.186229 0.966382 -0.031636
+-0.186229 0.174422 0.966382 0.031636
+-0.197238 0.161869 0.962244 0.0947728
+-0.207402 0.148622 0.953986 0.157504
+-0.216678 0.134739 0.941642 0.21956
+-0.225027 0.120279 0.925266 0.280676
+-0.232412 0.105304 0.904928 0.340591
+-0.238801 0.0898784 0.880714 0.399047
+-0.244168 0.0740676 0.85273 0.455794
+-0.24849 0.0579397 0.821094 0.510589
+-0.251747 0.0415636 0.785942 0.563198
+-0.253927 0.0250096 0.747424 0.613395
+-0.255019 0.00834847 0.705706 0.660965
+0.306022 0.0100181 -0.554502 -0.773807
+0.304712 0.0300115 -0.502706 -0.808416
+0.302097 0.0498763 -0.448756 -0.839564
+0.298188 0.0695276 -0.392885 -0.867117
+0.293002 0.0888812 -0.335332 -0.890956
+0.286561 0.107854 -0.276343 -0.91098
+0.278894 0.126365 -0.21617 -0.927103
+0.270032 0.144335 -0.155072 -0.939256
+0.260014 0.161687 -0.0933095 -0.947388
+0.248882 0.178347 -0.0311476 -0.951462
+0.236685 0.194242 0.0311476 -0.951462
+0.223474 0.209307 0.0933096 -0.947388
+0.209307 0.223474 0.155072 -0.939256
+0.194242 0.236685 0.21617 -0.927103
+0.178347 0.248882 0.276343 -0.91098
+0.161687 0.260014 0.335332 -0.890956
+0.144335 0.270032 0.392885 -0.867116
+0.126365 0.278894 0.448756 -0.839564
+0.107854 0.286562 0.502706 -0.808416
+0.0888812 0.293002 0.554502 -0.773807
+0.0695276 0.298188 0.603924 -0.735884
+0.0498763 0.302097 0.650761 -0.69481
+0.0300115 0.304712 0.69481 -0.65076
+0.0100181 0.306022 0.735884 -0.603924
+-0.0100182 0.306022 0.773807 -0.554502
+-0.0300115 0.304712 0.808416 -0.502706
+-0.0498764 0.302097 0.839564 -0.448756
+-0.0695276 0.298188 0.867117 -0.392885
+-0.0888812 0.293002 0.890956 -0.335332
+-0.107854 0.286561 0.91098 -0.276343
+-0.126365 0.278894 0.927103 -0.21617
+-0.144335 0.270032 0.939256 -0.155072
+-0.161687 0.260014 0.947388 -0.0933095
+-0.178347 0.248882 0.951462 -0.0311475
+-0.194242 0.236685 0.951462 0.0311476
+-0.209307 0.223474 0.947388 0.0933096
+-0.223474 0.209307 0.939256 0.155072
+-0.236685 0.194242 0.927103 0.21617
+-0.248882 0.178347 0.91098 0.276343
+-0.260014 0.161687 0.890956 0.335332
+-0.270032 0.144335 0.867117 0.392885
+-0.278894 0.126365 0.839564 0.448756
+-0.286562 0.107854 0.808416 0.502706
+-0.293002 0.0888812 0.773807 0.554502
+-0.298188 0.0695276 0.735884 0.603924
+-0.302097 0.0498764 0.69481 0.65076
+-0.304712 0.0300115 0.65076 0.69481
+-0.306022 0.0100182 0.603924 0.735884
+0.306022 0.0100181 -0.735884 -0.603924
+0.304712 0.0300115 -0.69481 -0.65076
+0.302097 0.0498763 -0.65076 -0.69481
+0.298188 0.0695276 -0.603924 -0.735884
+0.293002 0.0888812 -0.554502 -0.773807
+0.286561 0.107854 -0.502706 -0.808416
+0.278894 0.126365 -0.448756 -0.839564
+0.270032 0.144335 -0.392885 -0.867117
+0.260014 0.161687 -0.335332 -0.890956
+0.248882 0.178347 -0.276343 -0.91098
+0.236685 0.194242 -0.21617 -0.927103
+0.223474 0.209307 -0.155072 -0.939256
+0.209307 0.223474 -0.0933095 -0.947388
+0.194242 0.236685 -0.0311476 -0.951462
+0.178347 0.248882 0.0311477 -0.951462
+0.161687 0.260014 0.0933096 -0.947388
+0.144335 0.270032 0.155072 -0.939256
+0.126365 0.278894 0.21617 -0.927103
+0.107854 0.286562 0.276343 -0.91098
+0.0888812 0.293002 0.335332 -0.890956
+0.0695276 0.298188 0.392885 -0.867117
+0.0498763 0.302097 0.448756 -0.839564
+0.0300115 0.304712 0.502706 -0.808416
+0.0100181 0.306022 0.554502 -0.773807
+-0.0100182 0.306022 0.603924 -0.735884
+-0.0300115 0.304712 0.650761 -0.69481
+-0.0498764 0.302097 0.69481 -0.65076
+-0.0695276 0.298188 0.735884 -0.603924
+-0.0888812 0.293002 0.773807 -0.554502
+-0.107854 0.286561 0.808416 -0.502705
+-0.126365 0.278894 0.839564 -0.448756
+-0.144335 0.270032 0.867116 -0.392885
+-0.161687 0.260014 0.890956 -0.335332
+-0.178347 0.248882 0.91098 -0.276343
+-0.194242 0.236685 0.927103 -0.21617
+-0.209307 0.223474 0.939256 -0.155072
+-0.223474 0.209307 0.947388 -0.0933096
+-0.236685 0.194242 0.951462 -0.0311476
+-0.248882 0.178347 0.951462 0.0311476
+-0.260014 0.161687 0.947388 0.0933096
+-0.270032 0.144335 0.939256 0.155072
+-0.278894 0.126365 0.927103 0.21617
+-0.286562 0.107854 0.91098 0.276343
+-0.293002 0.0888812 0.890956 0.335332
+-0.298188 0.0695276 0.867116 0.392885
+-0.302097 0.0498764 0.839564 0.448756
+-0.304712 0.0300115 0.808416 0.502706
+-0.306022 0.0100182 0.773807 0.554502
+0.357026 0.0116878 -0.63849 -0.681709
+0.355497 0.0350134 -0.592537 -0.722008
+0.352446 0.0581891 -0.544047 -0.759216
+0.347886 0.0811156 -0.493227 -0.793173
+0.341836 0.103695 -0.440295 -0.823733
+0.334322 0.12583 -0.385477 -0.850766
+0.325376 0.147426 -0.329009 -0.874156
+0.315037 0.168391 -0.271132 -0.893803
+0.30335 0.188635 -0.212094 -0.909622
+0.290363 0.208071 -0.152148 -0.921546
+0.276133 0.226616 -0.0915501 -0.929524
+0.26072 0.244191 -0.0305603 -0.933521
+0.244191 0.26072 0.0305603 -0.933521
+0.226616 0.276133 0.0915501 -0.929524
+0.208071 0.290363 0.152148 -0.921546
+0.188635 0.30335 0.212094 -0.909622
+0.168391 0.315038 0.271132 -0.893803
+0.147426 0.325376 0.329009 -0.874156
+0.12583 0.334322 0.385477 -0.850766
+0.103695 0.341836 0.440295 -0.823733
+0.0811155 0.347886 0.493227 -0.793173
+0.058189 0.352446 0.544047 -0.759216
+0.0350134 0.355497 0.592537 -0.722008
+0.0116878 0.357026 0.63849 -0.681709
+-0.0116879 0.357026 0.681709 -0.63849
+-0.0350134 0.355497 0.722008 -0.592537
+-0.0581891 0.352446 0.759216 -0.544047
+-0.0811156 0.347886 0.793173 -0.493227
+-0.103695 0.341836 0.823733 -0.440294
+-0.12583 0.334322 0.850766 -0.385477
+-0.147426 0.325376 0.874156 -0.329009
+-0.168391 0.315038 0.893803 -0.271132
+-0.188635 0.30335 0.909622 -0.212094
+-0.208071 0.290363 0.921546 -0.152148
+-0.226616 0.276133 0.929524 -0.0915501
+-0.244191 0.26072 0.933521 -0.0305603
+-0.26072 0.244191 0.933521 0.0305603
+-0.276133 0.226616 0.929524 0.0915501
+-0.290363 0.208071 0.921546 0.152148
+-0.30335 0.188635 0.909622 0.212094
+-0.315037 0.168391 0.893803 0.271132
+-0.325376 0.147426 0.874156 0.329009
+-0.334322 0.12583 0.850766 0.385477
+-0.341836 0.103695 0.823733 0.440295
+-0.347886 0.0811155 0.793173 0.493227
+-0.352446 0.0581891 0.759216 0.544047
+-0.355497 0.0350134 0.722008 0.592537
+-0.357026 0.0116879 0.681709 0.63849
+0.255019 0.00834844 -0.119929 -0.959434
+0.253927 0.0250096 -0.0569222 -0.965223
+0.251747 0.0415636 0.0063283 -0.966879
+0.24849 0.0579397 0.0695517 -0.964395
+0.244168 0.0740676 0.132477 -0.957782
+0.238801 0.0898784 0.194836 -0.947067
+0.232412 0.105304 0.256359 -0.932296
+0.225027 0.120279 0.316786 -0.913533
+0.216678 0.134739 0.375855 -0.890858
+0.207402 0.148622 0.433316 -0.864369
+0.197238 0.161869 0.48892 -0.834178
+0.186229 0.174422 0.542431 -0.800415
+0.174422 0.186229 0.593619 -0.763225
+0.161869 0.197238 0.642266 -0.722766
+0.148622 0.207402 0.688162 -0.679212
+0.134739 0.216678 0.731111 -0.63275
+0.120279 0.225027 0.770929 -0.583578
+0.105304 0.232412 0.807447 -0.531908
+0.0898784 0.238801 0.840506 -0.477959
+0.0740676 0.244168 0.869967 -0.421964
+0.0579397 0.24849 0.895702 -0.364162
+0.0415636 0.251747 0.917601 -0.304801
+0.0250096 0.253927 0.935572 -0.244134
+0.00834842 0.255019 0.949536 -0.182422
+-0.00834847 0.255019 0.959434 -0.119929
+-0.0250096 0.253927 0.965223 -0.0569221
+-0.0415636 0.251747 0.966879 0.00632837
+-0.0579397 0.24849 0.964395 0.0695517
+-0.0740677 0.244168 0.957782 0.132477
+-0.0898785 0.238801 0.947066 0.194836
+-0.105304 0.232412 0.932296 0.25636
+-0.120279 0.225027 0.913533 0.316786
+-0.134739 0.216678 0.890858 0.375855
+-0.148622 0.207402 0.864369 0.433316
+-0.161869 0.197238 0.834178 0.48892
+-0.174422 0.186229 0.800415 0.542431
+-0.186229 0.174422 0.763225 0.593619
+-0.197238 0.161869 0.722766 0.642266
+-0.207402 0.148622 0.679212 0.688162
+-0.216678 0.134739 0.63275 0.731111
+-0.225027 0.120279 0.583578 0.770929
+-0.232412 0.105304 0.531908 0.807447
+-0.238801 0.0898784 0.477959 0.840506
+-0.244168 0.0740676 0.421964 0.869967
+-0.24849 0.0579397 0.364162 0.895702
+-0.251747 0.0415636 0.304801 0.917601
+-0.253927 0.0250096 0.244134 0.935572
+-0.255019 0.00834847 0.182422 0.949536
+0.306022 0.0100181 -0.0933095 -0.947388
+0.304712 0.0300115 -0.0311476 -0.951462
+0.302097 0.0498763 0.0311476 -0.951462
+0.298188 0.0695276 0.0933096 -0.947388
+0.293002 0.0888812 0.155072 -0.939256
+0.286561 0.107854 0.21617 -0.927103
+0.278894 0.126365 0.276343 -0.91098
+0.270032 0.144335 0.335332 -0.890956
+0.260014 0.161687 0.392885 -0.867117
+0.248882 0.178347 0.448756 -0.839564
+0.236685 0.194242 0.502706 -0.808416
+0.223474 0.209307 0.554502 -0.773807
+0.209307 0.223474 0.603924 -0.735884
+0.194242 0.236685 0.65076 -0.69481
+0.178347 0.248882 0.69481 -0.65076
+0.161687 0.260014 0.735884 -0.603924
+0.144335 0.270032 0.773807 -0.554502
+0.126365 0.278894 0.808416 -0.502706
+0.107854 0.286562 0.839564 -0.448756
+0.0888812 0.293002 0.867117 -0.392885
+0.0695276 0.298188 0.890956 -0.335332
+0.0498763 0.302097 0.91098 -0.276343
+0.0300115 0.304712 0.927103 -0.21617
+0.0100181 0.306022 0.939256 -0.155072
+-0.0100182 0.306022 0.947388 -0.0933094
+-0.0300115 0.304712 0.951462 -0.0311476
+-0.0498764 0.302097 0.951462 0.0311477
+-0.0695276 0.298188 0.947388 0.0933096
+-0.0888812 0.293002 0.939256 0.155072
+-0.107854 0.286561 0.927103 0.21617
+-0.126365 0.278894 0.91098 0.276343
+-0.144335 0.270032 0.890956 0.335332
+-0.161687 0.260014 0.867117 0.392885
+-0.178347 0.248882 0.839564 0.448756
+-0.194242 0.236685 0.808416 0.502706
+-0.209307 0.223474 0.773807 0.554502
+-0.223474 0.209307 0.735884 0.603924
+-0.236685 0.194242 0.69481 0.650761
+-0.248882 0.178347 0.650761 0.69481
+-0.260014 0.161687 0.603924 0.735884
+-0.270032 0.144335 0.554502 0.773807
+-0.278894 0.126365 0.502706 0.808416
+-0.286562 0.107854 0.448756 0.839564
+-0.293002 0.0888812 0.392885 0.867117
+-0.298188 0.0695276 0.335332 0.890956
+-0.302097 0.0498764 0.276343 0.91098
+-0.304712 0.0300115 0.21617 0.927103
+-0.306022 0.0100182 0.155072 0.939256
+0.306022 0.0100181 -0.335332 -0.890956
+0.304712 0.0300115 -0.276343 -0.91098
+0.302097 0.0498763 -0.21617 -0.927103
+0.298188 0.0695276 -0.155072 -0.939256
+0.293002 0.0888812 -0.0933095 -0.947388
+0.286561 0.107854 -0.0311477 -0.951462
+0.278894 0.126365 0.0311476 -0.951462
+0.270032 0.144335 0.0933096 -0.947388
+0.260014 0.161687 0.155072 -0.939256
+0.248882 0.178347 0.21617 -0.927103
+0.236685 0.194242 0.276343 -0.91098
+0.223474 0.209307 0.335332 -0.890956
+0.209307 0.223474 0.392885 -0.867117
+0.194242 0.236685 0.448756 -0.839564
+0.178347 0.248882 0.502706 -0.808416
+0.161687 0.260014 0.554502 -0.773807
+0.144335 0.270032 0.603924 -0.735884
+0.126365 0.278894 0.650761 -0.69481
+0.107854 0.286562 0.69481 -0.65076
+0.0888812 0.293002 0.735884 -0.603924
+0.0695276 0.298188 0.773807 -0.554502
+0.0498763 0.302097 0.808416 -0.502706
+0.0300115 0.304712 0.839564 -0.448756
+0.0100181 0.306022 0.867117 -0.392885
+-0.0100182 0.306022 0.890956 -0.335332
+-0.0300115 0.304712 0.91098 -0.276343
+-0.0498764 0.302097 0.927103 -0.21617
+-0.0695276 0.298188 0.939256 -0.155072
+-0.0888812 0.293002 0.947388 -0.0933094
+-0.107854 0.286561 0.951462 -0.0311475
+-0.126365 0.278894 0.951462 0.0311478
+-0.144335 0.270032 0.947388 0.0933094
+-0.161687 0.260014 0.939256 0.155072
+-0.178347 0.248882 0.927103 0.21617
+-0.194242 0.236685 0.91098 0.276343
+-0.209307 0.223474 0.890956 0.335332
+-0.223474 0.209307 0.867117 0.392885
+-0.236685 0.194242 0.839564 0.448756
+-0.248882 0.178347 0.808416 0.502706
+-0.260014 0.161687 0.773807 0.554502
+-0.270032 0.144335 0.735884 0.603924
+-0.278894 0.126365 0.69481 0.65076
+-0.286562 0.107854 0.65076 0.69481
+-0.293002 0.0888812 0.603924 0.735884
+-0.298188 0.0695276 0.554502 0.773807
+-0.302097 0.0498764 0.502706 0.808416
+-0.304712 0.0300115 0.448756 0.839564
+-0.306022 0.0100182 0.392885 0.867116
+0.357026 0.0116878 -0.279477 -0.891229
+0.355497 0.0350134 -0.22059 -0.907599
+0.352446 0.0581891 -0.160758 -0.920083
+0.347886 0.0811156 -0.100237 -0.928627
+0.341836 0.103695 -0.0392873 -0.933195
+0.334322 0.12583 0.0218307 -0.933766
+0.325376 0.147426 0.0828552 -0.930339
+0.315037 0.168391 0.143525 -0.922928
+0.30335 0.188635 0.20358 -0.911565
+0.290363 0.208071 0.262763 -0.896299
+0.276133 0.226616 0.320821 -0.877194
+0.26072 0.244191 0.377506 -0.854333
+0.244191 0.26072 0.432574 -0.827814
+0.226616 0.276133 0.485789 -0.79775
+0.208071 0.290363 0.536924 -0.76427
+0.188635 0.30335 0.58576 -0.727517
+0.168391 0.315038 0.632088 -0.687649
+0.147426 0.325376 0.675709 -0.644836
+0.12583 0.334322 0.716437 -0.599262
+0.103695 0.341836 0.754096 -0.551121
+0.0811155 0.347886 0.788527 -0.500621
+0.058189 0.352446 0.819581 -0.447977
+0.0350134 0.355497 0.847125 -0.393415
+0.0116878 0.357026 0.871042 -0.337168
+-0.0116879 0.357026 0.891229 -0.279477
+-0.0350134 0.355497 0.907599 -0.22059
+-0.0581891 0.352446 0.920083 -0.160757
+-0.0811156 0.347886 0.928627 -0.100237
+-0.103695 0.341836 0.933195 -0.0392871
+-0.12583 0.334322 0.933766 0.0218308
+-0.147426 0.325376 0.930339 0.0828553
+-0.168391 0.315038 0.922928 0.143525
+-0.188635 0.30335 0.911565 0.20358
+-0.208071 0.290363 0.896299 0.262763
+-0.226616 0.276133 0.877194 0.320821
+-0.244191 0.26072 0.854333 0.377506
+-0.26072 0.244191 0.827814 0.432574
+-0.276133 0.226616 0.79775 0.485789
+-0.290363 0.208071 0.76427 0.536924
+-0.30335 0.188635 0.727517 0.58576
+-0.315037 0.168391 0.687649 0.632088
+-0.325376 0.147426 0.644836 0.675709
+-0.334322 0.12583 0.599262 0.716437
+-0.341836 0.103695 0.551121 0.754096
+-0.347886 0.0811155 0.500621 0.788527
+-0.352446 0.0581891 0.447977 0.819581
+-0.355497 0.0350134 0.393415 0.847125
+-0.357026 0.0116879 0.337168 0.871042
+0.357026 0.0116878 -0.0741531 -0.931073
+0.355497 0.0350134 -0.0130992 -0.933929
+0.352446 0.0581891 0.0480108 -0.932787
+0.347886 0.0811156 0.108915 -0.927649
+0.341836 0.103695 0.169353 -0.91854
+0.334322 0.12583 0.229066 -0.905497
+0.325376 0.147426 0.287798 -0.888577
+0.315037 0.168391 0.345297 -0.867851
+0.30335 0.188635 0.401318 -0.843409
+0.290363 0.208071 0.45562 -0.815356
+0.276133 0.226616 0.507972 -0.783811
+0.26072 0.244191 0.558148 -0.74891
+0.244191 0.26072 0.605934 -0.710802
+0.226616 0.276133 0.651125 -0.66965
+0.208071 0.290363 0.693528 -0.625631
+0.188635 0.30335 0.732962 -0.578932
+0.168391 0.315038 0.769256 -0.529755
+0.147426 0.325376 0.802257 -0.478309
+0.12583 0.334322 0.831822 -0.424815
+0.103695 0.341836 0.857825 -0.369501
+0.0811155 0.347886 0.880155 -0.312606
+0.058189 0.352446 0.898716 -0.254371
+0.0350134 0.355497 0.913429 -0.195048
+0.0116878 0.357026 0.92423 -0.134889
+-0.0116879 0.357026 0.931073 -0.074153
+-0.0350134 0.355497 0.933929 -0.0130991
+-0.0581891 0.352446 0.932787 0.0480108
+-0.0811156 0.347886 0.927649 0.108915
+-0.103695 0.341836 0.91854 0.169353
+-0.12583 0.334322 0.905497 0.229066
+-0.147426 0.325376 0.888577 0.287798
+-0.168391 0.315038 0.867851 0.345297
+-0.188635 0.30335 0.84341 0.401318
+-0.208071 0.290363 0.815356 0.455621
+-0.226616 0.276133 0.783812 0.507972
+-0.244191 0.26072 0.74891 0.558148
+-0.26072 0.244191 0.710802 0.605934
+-0.276133 0.226616 0.66965 0.651125
+-0.290363 0.208071 0.625631 0.693528
+-0.30335 0.188635 0.578933 0.732962
+-0.315037 0.168391 0.529755 0.769256
+-0.325376 0.147426 0.478309 0.802257
+-0.334322 0.12583 0.424815 0.831822
+-0.341836 0.103695 0.369501 0.857825
+-0.347886 0.0811155 0.312606 0.880155
+-0.352446 0.0581891 0.254372 0.898716
+-0.355497 0.0350134 0.195048 0.913429
+-0.357026 0.0116879 0.134889 0.92423
+0.40803 0.0133575 -0.0597046 -0.910916
+0.406282 0.0400153 2.49393e-09 -0.912871
+0.402795 0.0665018 0.0597046 -0.910916
+0.397584 0.0927035 0.119154 -0.905061
+0.390669 0.118508 0.178092 -0.89533
+0.382082 0.143805 0.236268 -0.881766
+0.371859 0.168487 0.293433 -0.864425
+0.360043 0.192447 0.349341 -0.843383
+0.346685 0.215583 0.403752 -0.818729
+0.331843 0.237796 0.456435 -0.790569
+0.31558 0.25899 0.507164 -0.759024
+0.297966 0.279075 0.555721 -0.724229
+0.279075 0.297966 0.601898 -0.686333
+0.25899 0.31558 0.645497 -0.645497
+0.237796 0.331843 0.686333 -0.601898
+0.215583 0.346685 0.724229 -0.555721
+0.192447 0.360043 0.759024 -0.507164
+0.168487 0.371859 0.790569 -0.456435
+0.143805 0.382082 0.818729 -0.403752
+0.118508 0.390669 0.843383 -0.349341
+0.0927035 0.397584 0.864425 -0.293433
+0.0665017 0.402795 0.881766 -0.236268
+0.0400153 0.406282 0.89533 -0.178092
+0.0133575 0.40803 0.905061 -0.119153
+-0.0133575 0.40803 0.910916 -0.0597045
+-0.0400154 0.406282 0.912871 7.64039e-08
+-0.0665018 0.402795 0.910916 0.0597047
+-0.0927035 0.397584 0.905061 0.119154
+-0.118508 0.390669 0.89533 0.178092
+-0.143806 0.382082 0.881766 0.236269
+-0.168487 0.371859 0.864425 0.293433
+-0.192447 0.360043 0.843383 0.34934
+-0.215583 0.346685 0.818729 0.403752
+-0.237796 0.331843 0.790569 0.456436
+-0.25899 0.31558 0.759024 0.507164
+-0.279075 0.297966 0.724229 0.555721
+-0.297966 0.279075 0.686333 0.601898
+-0.31558 0.25899 0.645497 0.645497
+-0.331843 0.237796 0.601898 0.686333
+-0.346685 0.215583 0.555721 0.724229
+-0.360043 0.192447 0.507164 0.759024
+-0.371859 0.168487 0.456435 0.790569
+-0.382082 0.143805 0.403752 0.818729
+-0.390669 0.118508 0.349341 0.843383
+-0.397584 0.0927035 0.293433 0.864425
+-0.402795 0.0665018 0.236268 0.881766
+-0.406282 0.0400153 0.178092 0.89533
+-0.40803 0.0133575 0.119154 0.905061
+0.40803 0.0133575 -0.236268 -0.881766
+0.406282 0.0400153 -0.178092 -0.89533
+0.402795 0.0665018 -0.119154 -0.905061
+0.397584 0.0927035 -0.0597046 -0.910916
+0.390669 0.118508 6.80367e-10 -0.912871
+0.382082 0.143805 0.0597046 -0.910916
+0.371859 0.168487 0.119154 -0.905061
+0.360043 0.192447 0.178092 -0.89533
+0.346685 0.215583 0.236268 -0.881766
+0.331843 0.237796 0.293433 -0.864425
+0.31558 0.25899 0.349341 -0.843383
+0.297966 0.279075 0.403753 -0.818729
+0.279075 0.297966 0.456435 -0.790569
+0.25899 0.31558 0.507164 -0.759024
+0.237796 0.331843 0.555721 -0.724229
+0.215583 0.346685 0.601898 -0.686333
+0.192447 0.360043 0.645497 -0.645497
+0.168487 0.371859 0.686333 -0.601898
+0.143805 0.382082 0.724229 -0.555721
+0.118508 0.390669 0.759024 -0.507164
+0.0927035 0.397584 0.790569 -0.456435
+0.0665017 0.402795 0.818729 -0.403752
+0.0400153 0.406282 0.843383 -0.34934
+0.0133575 0.40803 0.864425 -0.293433
+-0.0133575 0.40803 0.881766 -0.236268
+-0.0400154 0.406282 0.89533 -0.178092
+-0.0665018 0.402795 0.905061 -0.119154
+-0.0927035 0.397584 0.910916 -0.0597046
+-0.118508 0.390669 0.912871 1.49406e-07
+-0.143806 0.382082 0.910916 0.0597048
+-0.168487 0.371859 0.905061 0.119154
+-0.192447 0.360043 0.89533 0.178092
+-0.215583 0.346685 0.881766 0.236268
+-0.237796 0.331843 0.864425 0.293433
+-0.25899 0.31558 0.843383 0.349341
+-0.279075 0.297966 0.818729 0.403753
+-0.297966 0.279075 0.790569 0.456435
+-0.31558 0.25899 0.759024 0.507164
+-0.331843 0.237796 0.724229 0.555721
+-0.346685 0.215583 0.686333 0.601898
+-0.360043 0.192447 0.645497 0.645497
+-0.371859 0.168487 0.601898 0.686333
+-0.382082 0.143805 0.555721 0.724229
+-0.390669 0.118508 0.507164 0.759024
+-0.397584 0.0927035 0.456435 0.790569
+-0.402795 0.0665018 0.403753 0.818729
+-0.406282 0.0400153 0.349341 0.843383
+-0.40803 0.0133575 0.293433 0.864425
+0.456191 0.0149342 -0.144937 -0.877872
+0.454238 0.0447385 -0.0872114 -0.885472
+0.450339 0.0743513 -0.029112 -0.88928
+0.444512 0.103646 0.029112 -0.88928
+0.436782 0.132496 0.0872114 -0.885472
+0.427181 0.160779 0.144937 -0.877872
+0.415751 0.188374 0.202043 -0.866513
+0.40254 0.215162 0.258283 -0.851444
+0.387606 0.241029 0.313417 -0.832728
+0.371012 0.265863 0.367209 -0.810447
+0.352829 0.28956 0.419428 -0.784695
+0.333136 0.312016 0.469852 -0.755583
+0.312016 0.333136 0.518263 -0.723236
+0.28956 0.352829 0.564456 -0.687791
+0.265863 0.371012 0.608231 -0.649401
+0.241029 0.387606 0.649401 -0.608231
+0.215162 0.40254 0.687791 -0.564456
+0.188374 0.415751 0.723236 -0.518263
+0.160779 0.427181 0.755583 -0.469852
+0.132496 0.436782 0.784695 -0.419428
+0.103646 0.444512 0.810447 -0.367209
+0.0743512 0.450339 0.832728 -0.313417
+0.0447384 0.454238 0.851444 -0.258283
+0.0149341 0.456191 0.866513 -0.202042
+-0.0149342 0.456191 0.877872 -0.144937
+-0.0447385 0.454238 0.885472 -0.0872113
+-0.0743513 0.450339 0.88928 -0.029112
+-0.103646 0.444512 0.88928 0.0291121
+-0.132496 0.436781 0.885472 0.0872115
+-0.160779 0.427181 0.877872 0.144937
+-0.188374 0.415751 0.866513 0.202043
+-0.215162 0.40254 0.851444 0.258283
+-0.241029 0.387606 0.832728 0.313417
+-0.265864 0.371012 0.810447 0.367209
+-0.28956 0.352829 0.784695 0.419428
+-0.312016 0.333136 0.755583 0.469852
+-0.333136 0.312016 0.723236 0.518263
+-0.352829 0.28956 0.687791 0.564456
+-0.371012 0.265864 0.649401 0.608231
+-0.387606 0.241029 0.608231 0.649401
+-0.40254 0.215162 0.564456 0.687791
+-0.415751 0.188374 0.518263 0.723236
+-0.427181 0.160779 0.469852 0.755583
+-0.436782 0.132496 0.419428 0.784695
+-0.444512 0.103646 0.367209 0.810447
+-0.450339 0.0743513 0.313417 0.832728
+-0.454238 0.0447385 0.258283 0.851444
+-0.456191 0.0149342 0.202043 0.866513
+0.357026 0.0116878 -0.470787 -0.806694
+0.355497 0.0350134 -0.417019 -0.835758
+0.352446 0.0581891 -0.361465 -0.861243
+0.347886 0.0811156 -0.304363 -0.88304
+0.341836 0.103695 -0.245958 -0.901055
+0.334322 0.12583 -0.186499 -0.915212
+0.325376 0.147426 -0.126242 -0.925451
+0.315037 0.168391 -0.0654444 -0.931726
+0.30335 0.188635 -0.00436652 -0.934011
+0.290363 0.208071 0.0567301 -0.932297
+0.276133 0.226616 0.117584 -0.92659
+0.26072 0.244191 0.177934 -0.916916
+0.244191 0.26072 0.237522 -0.903316
+0.226616 0.276133 0.296093 -0.885847
+0.208071 0.290363 0.353396 -0.864585
+0.188635 0.30335 0.409186 -0.83962
+0.168391 0.315038 0.463224 -0.811061
+0.147426 0.325376 0.515278 -0.779028
+0.12583 0.334322 0.565126 -0.743659
+0.103695 0.341836 0.612553 -0.705106
+0.0811155 0.347886 0.657358 -0.663533
+0.058189 0.352446 0.699348 -0.619119
+0.0350134 0.355497 0.738343 -0.572054
+0.0116878 0.357026 0.774176 -0.522539
+-0.0116879 0.357026 0.806694 -0.470787
+-0.0350134 0.355497 0.835758 -0.417019
+-0.0581891 0.352446 0.861243 -0.361465
+-0.0811156 0.347886 0.88304 -0.304363
+-0.103695 0.341836 0.901055 -0.245957
+-0.12583 0.334322 0.915213 -0.186499
+-0.147426 0.325376 0.925451 -0.126242
+-0.168391 0.315038 0.931726 -0.0654445
+-0.188635 0.30335 0.934011 -0.00436653
+-0.208071 0.290363 0.932297 0.0567302
+-0.226616 0.276133 0.92659 0.117584
+-0.244191 0.26072 0.916916 0.177934
+-0.26072 0.244191 0.903316 0.237522
+-0.276133 0.226616 0.885847 0.296093
+-0.290363 0.208071 0.864585 0.353396
+-0.30335 0.188635 0.83962 0.409186
+-0.315037 0.168391 0.811061 0.463224
+-0.325376 0.147426 0.779028 0.515278
+-0.334322 0.12583 0.743659 0.565126
+-0.341836 0.103695 0.705106 0.612553
+-0.347886 0.0811155 0.663533 0.657358
+-0.352446 0.0581891 0.619119 0.699348
+-0.355497 0.0350134 0.572054 0.738343
+-0.357026 0.0116879 0.522539 0.774176
+0.40803 0.0133575 -0.403752 -0.818729
+0.406282 0.0400153 -0.349341 -0.843383
+0.402795 0.0665018 -0.293433 -0.864425
+0.397584 0.0927035 -0.236268 -0.881766
+0.390669 0.118508 -0.178092 -0.89533
+0.382082 0.143805 -0.119154 -0.905061
+0.371859 0.168487 -0.0597046 -0.910916
+0.360043 0.192447 1.92711e-08 -0.912871
+0.346685 0.215583 0.0597046 -0.910916
+0.331843 0.237796 0.119154 -0.905061
+0.31558 0.25899 0.178092 -0.89533
+0.297966 0.279075 0.236268 -0.881766
+0.279075 0.297966 0.293433 -0.864425
+0.25899 0.31558 0.349341 -0.843383
+0.237796 0.331843 0.403753 -0.818729
+0.215583 0.346685 0.456436 -0.790569
+0.192447 0.360043 0.507164 -0.759024
+0.168487 0.371859 0.555721 -0.724229
+0.143805 0.382082 0.601898 -0.686333
+0.118508 0.390669 0.645497 -0.645497
+0.0927035 0.397584 0.686333 -0.601898
+0.0665017 0.402795 0.724229 -0.555721
+0.0400153 0.406282 0.759024 -0.507164
+0.0133575 0.40803 0.790569 -0.456435
+-0.0133575 0.40803 0.818729 -0.403752
+-0.0400154 0.406282 0.843383 -0.349341
+-0.0665018 0.402795 0.864425 -0.293433
+-0.0927035 0.397584 0.881766 -0.236268
+-0.118508 0.390669 0.89533 -0.178092
+-0.143806 0.382082 0.905061 -0.119153
+-0.168487 0.371859 0.910916 -0.0597045
+-0.192447 0.360043 0.912871 -1.0406e-07
+-0.215583 0.346685 0.910916 0.0597046
+-0.237796 0.331843 0.905061 0.119154
+-0.25899 0.31558 0.89533 0.178092
+-0.279075 0.297966 0.881766 0.236268
+-0.297966 0.279075 0.864425 0.293433
+-0.31558 0.25899 0.843383 0.349341
+-0.331843 0.237796 0.818729 0.403752
+-0.346685 0.215583 0.790569 0.456435
+-0.360043 0.192447 0.759024 0.507164
+-0.371859 0.168487 0.724229 0.555721
+-0.382082 0.143805 0.686333 0.601898
+-0.390669 0.118508 0.645497 0.645497
+-0.397584 0.0927035 0.601898 0.686333
+-0.402795 0.0665018 0.555721 0.724229
+-0.406282 0.0400153 0.507164 0.759024
+-0.40803 0.0133575 0.456436 0.790569
+0.40803 0.0133575 -0.555721 -0.724229
+0.406282 0.0400153 -0.507164 -0.759024
+0.402795 0.0665018 -0.456435 -0.790569
+0.397584 0.0927035 -0.403752 -0.818729
+0.390669 0.118508 -0.349341 -0.843383
+0.382082 0.143805 -0.293433 -0.864425
+0.371859 0.168487 -0.236268 -0.881766
+0.360043 0.192447 -0.178092 -0.89533
+0.346685 0.215583 -0.119154 -0.905061
+0.331843 0.237796 -0.0597046 -0.910916
+0.31558 0.25899 -1.65496e-08 -0.912871
+0.297966 0.279075 0.0597046 -0.910916
+0.279075 0.297966 0.119154 -0.905061
+0.25899 0.31558 0.178092 -0.89533
+0.237796 0.331843 0.236268 -0.881766
+0.215583 0.346685 0.293433 -0.864425
+0.192447 0.360043 0.349341 -0.843383
+0.168487 0.371859 0.403753 -0.818729
+0.143805 0.382082 0.456436 -0.790569
+0.118508 0.390669 0.507164 -0.759024
+0.0927035 0.397584 0.555721 -0.724229
+0.0665017 0.402795 0.601898 -0.686333
+0.0400153 0.406282 0.645497 -0.645497
+0.0133575 0.40803 0.686333 -0.601898
+-0.0133575 0.40803 0.724229 -0.555721
+-0.0400154 0.406282 0.759024 -0.507164
+-0.0665018 0.402795 0.790569 -0.456435
+-0.0927035 0.397584 0.818729 -0.403752
+-0.118508 0.390669 0.843383 -0.34934
+-0.143806 0.382082 0.864425 -0.293433
+-0.168487 0.371859 0.881766 -0.236268
+-0.192447 0.360043 0.89533 -0.178092
+-0.215583 0.346685 0.905061 -0.119154
+-0.237796 0.331843 0.910916 -0.0597045
+-0.25899 0.31558 0.912871 -3.10581e-08
+-0.279075 0.297966 0.910916 0.0597047
+-0.297966 0.279075 0.905061 0.119154
+-0.31558 0.25899 0.89533 0.178092
+-0.331843 0.237796 0.881766 0.236268
+-0.346685 0.215583 0.864425 0.293433
+-0.360043 0.192447 0.843383 0.34934
+-0.371859 0.168487 0.818729 0.403752
+-0.382082 0.143805 0.790569 0.456436
+-0.390669 0.118508 0.759024 0.507164
+-0.397584 0.0927035 0.724229 0.555721
+-0.402795 0.0665018 0.686333 0.601898
+-0.406282 0.0400153 0.645497 0.645497
+-0.40803 0.0133575 0.601898 0.686333
+0.456191 0.0149342 -0.469852 -0.755583
+0.454238 0.0447385 -0.419428 -0.784695
+0.450339 0.0743513 -0.367209 -0.810447
+0.444512 0.103646 -0.313417 -0.832728
+0.436782 0.132496 -0.258283 -0.851444
+0.427181 0.160779 -0.202043 -0.866513
+0.415751 0.188374 -0.144937 -0.877872
+0.40254 0.215162 -0.0872114 -0.885472
+0.387606 0.241029 -0.029112 -0.88928
+0.371012 0.265863 0.029112 -0.88928
+0.352829 0.28956 0.0872114 -0.885472
+0.333136 0.312016 0.144937 -0.877872
+0.312016 0.333136 0.202043 -0.866513
+0.28956 0.352829 0.258283 -0.851444
+0.265863 0.371012 0.313417 -0.832728
+0.241029 0.387606 0.367209 -0.810447
+0.215162 0.40254 0.419428 -0.784695
+0.188374 0.415751 0.469852 -0.755583
+0.160779 0.427181 0.518263 -0.723236
+0.132496 0.436782 0.564456 -0.687791
+0.103646 0.444512 0.608231 -0.649401
+0.0743512 0.450339 0.649401 -0.608231
+0.0447384 0.454238 0.687791 -0.564455
+0.0149341 0.456191 0.723236 -0.518263
+-0.0149342 0.456191 0.755583 -0.469852
+-0.0447385 0.454238 0.784695 -0.419428
+-0.0743513 0.450339 0.810447 -0.367209
+-0.103646 0.444512 0.832728 -0.313417
+-0.132496 0.436781 0.851444 -0.258283
+-0.160779 0.427181 0.866513 -0.202042
+-0.188374 0.415751 0.877872 -0.144937
+-0.215162 0.40254 0.885472 -0.0872115
+-0.241029 0.387606 0.88928 -0.029112
+-0.265864 0.371012 0.88928 0.0291121
+-0.28956 0.352829 0.885472 0.0872114
+-0.312016 0.333136 0.877872 0.144937
+-0.333136 0.312016 0.866513 0.202043
+-0.352829 0.28956 0.851444 0.258283
+-0.371012 0.265864 0.832728 0.313417
+-0.387606 0.241029 0.810447 0.367209
+-0.40254 0.215162 0.784695 0.419428
+-0.415751 0.188374 0.755583 0.469852
+-0.427181 0.160779 0.723236 0.518263
+-0.436782 0.132496 0.687791 0.564456
+-0.444512 0.103646 0.649401 0.608231
+-0.450339 0.0743513 0.608231 0.649401
+-0.454238 0.0447385 0.564456 0.687791
+-0.456191 0.0149342 0.518263 0.723236
+0.456191 0.0149342 -0.313417 -0.832728
+0.454238 0.0447385 -0.258283 -0.851444
+0.450339 0.0743513 -0.202043 -0.866513
+0.444512 0.103646 -0.144937 -0.877872
+0.436782 0.132496 -0.0872114 -0.885472
+0.427181 0.160779 -0.029112 -0.88928
+0.415751 0.188374 0.029112 -0.88928
+0.40254 0.215162 0.0872114 -0.885472
+0.387606 0.241029 0.144937 -0.877872
+0.371012 0.265863 0.202043 -0.866513
+0.352829 0.28956 0.258283 -0.851444
+0.333136 0.312016 0.313417 -0.832728
+0.312016 0.333136 0.367209 -0.810447
+0.28956 0.352829 0.419428 -0.784695
+0.265863 0.371012 0.469852 -0.755583
+0.241029 0.387606 0.518263 -0.723236
+0.215162 0.40254 0.564456 -0.687791
+0.188374 0.415751 0.608231 -0.649401
+0.160779 0.427181 0.649401 -0.608231
+0.132496 0.436782 0.687791 -0.564456
+0.103646 0.444512 0.723236 -0.518263
+0.0743512 0.450339 0.755583 -0.469852
+0.0447384 0.454238 0.784695 -0.419428
+0.0149341 0.456191 0.810447 -0.367209
+-0.0149342 0.456191 0.832728 -0.313417
+-0.0447385 0.454238 0.851444 -0.258283
+-0.0743513 0.450339 0.866513 -0.202043
+-0.103646 0.444512 0.877872 -0.144937
+-0.132496 0.436781 0.885472 -0.0872112
+-0.160779 0.427181 0.88928 -0.0291119
+-0.188374 0.415751 0.88928 0.0291121
+-0.215162 0.40254 0.885472 0.0872113
+-0.241029 0.387606 0.877872 0.144937
+-0.265864 0.371012 0.866513 0.202043
+-0.28956 0.352829 0.851444 0.258283
+-0.312016 0.333136 0.832728 0.313417
+-0.333136 0.312016 0.810447 0.367209
+-0.352829 0.28956 0.784695 0.419428
+-0.371012 0.265864 0.755583 0.469852
+-0.387606 0.241029 0.723236 0.518263
+-0.40254 0.215162 0.687791 0.564455
+-0.415751 0.188374 0.649401 0.608231
+-0.427181 0.160779 0.608231 0.649401
+-0.436782 0.132496 0.564456 0.687791
+-0.444512 0.103646 0.518263 0.723236
+-0.450339 0.0743513 0.469852 0.755583
+-0.454238 0.0447385 0.419428 0.784695
+-0.456191 0.0149342 0.367209 0.810447
+0.499732 0.0163595 -0.224144 -0.836516
+0.497592 0.0490086 -0.168953 -0.849385
+0.493322 0.0814477 -0.113039 -0.858616
+0.486938 0.113538 -0.0566408 -0.864171
+0.47847 0.145142 6.45453e-10 -0.866025
+0.467953 0.176125 0.0566408 -0.864171
+0.455432 0.206354 0.113039 -0.858616
+0.440961 0.235698 0.168953 -0.849385
+0.424601 0.264034 0.224144 -0.836516
+0.406423 0.291239 0.278375 -0.820066
+0.386505 0.317197 0.331414 -0.800103
+0.364932 0.341796 0.383033 -0.776715
+0.341796 0.364932 0.433013 -0.75
+0.317197 0.386505 0.481138 -0.720074
+0.291239 0.406423 0.527203 -0.687064
+0.264034 0.424601 0.57101 -0.651112
+0.235698 0.440961 0.612372 -0.612372
+0.206353 0.455432 0.651112 -0.57101
+0.176125 0.467953 0.687064 -0.527203
+0.145142 0.47847 0.720074 -0.481138
+0.113538 0.486938 0.75 -0.433013
+0.0814477 0.493322 0.776715 -0.383033
+0.0490085 0.497592 0.800103 -0.331413
+0.0163595 0.499732 0.820066 -0.278375
+-0.0163596 0.499732 0.836516 -0.224144
+-0.0490086 0.497592 0.849385 -0.168953
+-0.0814478 0.493322 0.858616 -0.113039
+-0.113538 0.486938 0.864171 -0.0566407
+-0.145142 0.47847 0.866025 1.41739e-07
+-0.176125 0.467953 0.864171 0.0566409
+-0.206354 0.455432 0.858616 0.113039
+-0.235698 0.440961 0.849385 0.168953
+-0.264034 0.424601 0.836516 0.224144
+-0.291239 0.406423 0.820066 0.278375
+-0.317197 0.386505 0.800103 0.331414
+-0.341796 0.364932 0.776715 0.383033
+-0.364932 0.341796 0.75 0.433013
+-0.386505 0.317197 0.720074 0.481138
+-0.406423 0.291239 0.687064 0.527203
+-0.424601 0.264034 0.651112 0.57101
+-0.440961 0.235698 0.612373 0.612372
+-0.455432 0.206354 0.57101 0.651112
+-0.467953 0.176125 0.527203 0.687064
+-0.47847 0.145142 0.481138 0.720074
+-0.486938 0.113538 0.433013 0.75
+-0.493322 0.0814478 0.383033 0.776715
+-0.497592 0.0490085 0.331414 0.800103
+-0.499732 0.0163596 0.278375 0.820066
+0.499732 0.0163595 -0.383033 -0.776715
+0.497592 0.0490086 -0.331414 -0.800103
+0.493322 0.0814477 -0.278375 -0.820066
+0.486938 0.113538 -0.224144 -0.836516
+0.47847 0.145142 -0.168953 -0.849385
+0.467953 0.176125 -0.113039 -0.858616
+0.455432 0.206354 -0.0566408 -0.864171
+0.440961 0.235698 1.82821e-08 -0.866025
+0.424601 0.264034 0.0566408 -0.864171
+0.406423 0.291239 0.113039 -0.858616
+0.386505 0.317197 0.168953 -0.849385
+0.364932 0.341796 0.224144 -0.836516
+0.341796 0.364932 0.278375 -0.820066
+0.317197 0.386505 0.331414 -0.800103
+0.291239 0.406423 0.383033 -0.776715
+0.264034 0.424601 0.433013 -0.75
+0.235698 0.440961 0.481138 -0.720074
+0.206353 0.455432 0.527203 -0.687064
+0.176125 0.467953 0.57101 -0.651112
+0.145142 0.47847 0.612372 -0.612372
+0.113538 0.486938 0.651112 -0.57101
+0.0814477 0.493322 0.687064 -0.527203
+0.0490085 0.497592 0.720074 -0.481138
+0.0163595 0.499732 0.75 -0.433013
+-0.0163596 0.499732 0.776715 -0.383033
+-0.0490086 0.497592 0.800103 -0.331414
+-0.0814478 0.493322 0.820066 -0.278375
+-0.113538 0.486938 0.836516 -0.224144
+-0.145142 0.47847 0.849385 -0.168953
+-0.176125 0.467953 0.858616 -0.113039
+-0.206354 0.455432 0.864171 -0.0566407
+-0.235698 0.440961 0.866025 -9.87201e-08
+-0.264034 0.424601 0.864171 0.0566408
+-0.291239 0.406423 0.858616 0.113039
+-0.317197 0.386505 0.849385 0.168953
+-0.341796 0.364932 0.836516 0.224144
+-0.364932 0.341796 0.820066 0.278375
+-0.386505 0.317197 0.800103 0.331414
+-0.406423 0.291239 0.776715 0.383033
+-0.424601 0.264034 0.75 0.433013
+-0.440961 0.235698 0.720074 0.481138
+-0.455432 0.206354 0.687064 0.527203
+-0.467953 0.176125 0.651112 0.57101
+-0.47847 0.145142 0.612372 0.612372
+-0.486938 0.113538 0.57101 0.651112
+-0.493322 0.0814478 0.527203 0.687064
+-0.497592 0.0490085 0.481138 0.720074
+-0.499732 0.0163596 0.433013 0.75
+0.539773 0.0176703 -0.296463 -0.787682
+0.537461 0.0529353 -0.244311 -0.805385
+0.532848 0.0879736 -0.191113 -0.81964
+0.525954 0.122635 -0.137097 -0.830384
+0.516807 0.156772 -0.0824937 -0.837573
+0.505447 0.190237 -0.0275372 -0.841175
+0.491923 0.222887 0.0275372 -0.841175
+0.476292 0.254583 0.0824937 -0.837573
+0.458622 0.285189 0.137097 -0.830384
+0.438987 0.314574 0.191113 -0.81964
+0.417473 0.342612 0.244311 -0.805385
+0.394172 0.369182 0.296463 -0.787682
+0.369182 0.394172 0.347345 -0.766606
+0.342612 0.417473 0.396739 -0.742247
+0.314574 0.438987 0.444435 -0.71471
+0.285189 0.458622 0.490228 -0.684112
+0.254583 0.476292 0.533922 -0.650585
+0.222887 0.491923 0.575329 -0.614272
+0.190237 0.505447 0.614272 -0.575329
+0.156772 0.516807 0.650585 -0.533921
+0.122635 0.525954 0.684112 -0.490228
+0.0879735 0.532848 0.71471 -0.444435
+0.0529352 0.537461 0.742247 -0.396739
+0.0176703 0.539773 0.766606 -0.347345
+-0.0176704 0.539773 0.787682 -0.296463
+-0.0529354 0.537461 0.805385 -0.244311
+-0.0879736 0.532848 0.81964 -0.191113
+-0.122635 0.525954 0.830384 -0.137097
+-0.156772 0.516807 0.837573 -0.0824936
+-0.190237 0.505447 0.841175 -0.0275371
+-0.222887 0.491923 0.841175 0.0275373
+-0.254583 0.476292 0.837573 0.0824936
+-0.285189 0.458622 0.830384 0.137097
+-0.314574 0.438987 0.81964 0.191113
+-0.342612 0.417473 0.805385 0.244311
+-0.369182 0.394172 0.787682 0.296463
+-0.394172 0.369182 0.766606 0.347345
+-0.417473 0.342612 0.742247 0.39674
+-0.438987 0.314574 0.71471 0.444435
+-0.458622 0.285189 0.684112 0.490228
+-0.476292 0.254583 0.650585 0.533921
+-0.491923 0.222887 0.614272 0.575329
+-0.505447 0.190237 0.575329 0.614272
+-0.516807 0.156772 0.533922 0.650585
+-0.525954 0.122635 0.490228 0.684112
+-0.532848 0.0879736 0.444435 0.71471
+-0.537461 0.0529353 0.396739 0.742247
+-0.539773 0.0176704 0.347345 0.766606
+0.255019 0.00834844 -0.949536 -0.182422
+0.253927 0.0250096 -0.935572 -0.244134
+0.251747 0.0415636 -0.917601 -0.304801
+0.24849 0.0579397 -0.895702 -0.364162
+0.244168 0.0740676 -0.869967 -0.421964
+0.238801 0.0898784 -0.840506 -0.477959
+0.232412 0.105304 -0.807447 -0.531908
+0.225027 0.120279 -0.770929 -0.583578
+0.216678 0.134739 -0.731111 -0.63275
+0.207402 0.148622 -0.688162 -0.679212
+0.197238 0.161869 -0.642266 -0.722766
+0.186229 0.174422 -0.593619 -0.763225
+0.174422 0.186229 -0.542431 -0.800415
+0.161869 0.197238 -0.48892 -0.834178
+0.148622 0.207402 -0.433315 -0.864369
+0.134739 0.216678 -0.375855 -0.890858
+0.120279 0.225027 -0.316786 -0.913533
+0.105304 0.232412 -0.256359 -0.932296
+0.0898784 0.238801 -0.194835 -0.947067
+0.0740676 0.244168 -0.132477 -0.957782
+0.0579397 0.24849 -0.0695517 -0.964395
+0.0415636 0.251747 -0.00632817 -0.966879
+0.0250096 0.253927 0.0569223 -0.965223
+0.00834842 0.255019 0.119929 -0.959434
+-0.00834847 0.255019 0.182422 -0.949536
+-0.0250096 0.253927 0.244134 -0.935572
+-0.0415636 0.251747 0.304801 -0.917601
+-0.0579397 0.24849 0.364162 -0.895702
+-0.0740677 0.244168 0.421964 -0.869967
+-0.0898785 0.238801 0.477959 -0.840506
+-0.105304 0.232412 0.531908 -0.807447
+-0.120279 0.225027 0.583578 -0.770929
+-0.134739 0.216678 0.63275 -0.731111
+-0.148622 0.207402 0.679212 -0.688162
+-0.161869 0.197238 0.722766 -0.642266
+-0.174422 0.186229 0.763225 -0.593619
+-0.186229 0.174422 0.800415 -0.542431
+-0.197238 0.161869 0.834178 -0.48892
+-0.207402 0.148622 0.864369 -0.433316
+-0.216678 0.134739 0.890858 -0.375855
+-0.225027 0.120279 0.913533 -0.316786
+-0.232412 0.105304 0.932296 -0.256359
+-0.238801 0.0898784 0.947067 -0.194835
+-0.244168 0.0740676 0.957782 -0.132477
+-0.24849 0.0579397 0.964395 -0.0695516
+-0.251747 0.0415636 0.966879 -0.00632836
+-0.253927 0.0250096 0.965223 0.0569222
+-0.255019 0.00834847 0.959434 0.119929
+0.306022 0.0100181 -0.867117 -0.392885
+0.304712 0.0300115 -0.839564 -0.448756
+0.302097 0.0498763 -0.808416 -0.502706
+0.298188 0.0695276 -0.773807 -0.554502
+0.293002 0.0888812 -0.735884 -0.603924
+0.286561 0.107854 -0.69481 -0.65076
+0.278894 0.126365 -0.65076 -0.69481
+0.270032 0.144335 -0.603924 -0.735884
+0.260014 0.161687 -0.554502 -0.773807
+0.248882 0.178347 -0.502706 -0.808416
+0.236685 0.194242 -0.448756 -0.839564
+0.223474 0.209307 -0.392885 -0.867117
+0.209307 0.223474 -0.335332 -0.890956
+0.194242 0.236685 -0.276343 -0.91098
+0.178347 0.248882 -0.21617 -0.927103
+0.161687 0.260014 -0.155072 -0.939256
+0.144335 0.270032 -0.0933095 -0.947388
+0.126365 0.278894 -0.0311476 -0.951462
+0.107854 0.286562 0.0311477 -0.951462
+0.0888812 0.293002 0.0933096 -0.947388
+0.0695276 0.298188 0.155072 -0.939256
+0.0498763 0.302097 0.21617 -0.927103
+0.0300115 0.304712 0.276343 -0.91098
+0.0100181 0.306022 0.335332 -0.890956
+-0.0100182 0.306022 0.392885 -0.867116
+-0.0300115 0.304712 0.448756 -0.839564
+-0.0498764 0.302097 0.502706 -0.808416
+-0.0695276 0.298188 0.554502 -0.773807
+-0.0888812 0.293002 0.603925 -0.735884
+-0.107854 0.286561 0.650761 -0.69481
+-0.126365 0.278894 0.69481 -0.65076
+-0.144335 0.270032 0.735884 -0.603924
+-0.161687 0.260014 0.773807 -0.554502
+-0.178347 0.248882 0.808416 -0.502706
+-0.194242 0.236685 0.839564 -0.448756
+-0.209307 0.223474 0.867117 -0.392885
+-0.223474 0.209307 0.890956 -0.335332
+-0.236685 0.194242 0.91098 -0.276343
+-0.248882 0.178347 0.927103 -0.21617
+-0.260014 0.161687 0.939256 -0.155072
+-0.270032 0.144335 0.947388 -0.0933096
+-0.278894 0.126365 0.951462 -0.0311476
+-0.286562 0.107854 0.951462 0.0311477
+-0.293002 0.0888812 0.947388 0.0933095
+-0.298188 0.0695276 0.939256 0.155072
+-0.302097 0.0498764 0.927103 0.21617
+-0.304712 0.0300115 0.91098 0.276343
+-0.306022 0.0100182 0.890956 0.335332
+0.306022 0.0100181 -0.939256 -0.155072
+0.304712 0.0300115 -0.927103 -0.21617
+0.302097 0.0498763 -0.91098 -0.276343
+0.298188 0.0695276 -0.890956 -0.335332
+0.293002 0.0888812 -0.867117 -0.392885
+0.286561 0.107854 -0.839564 -0.448756
+0.278894 0.126365 -0.808416 -0.502706
+0.270032 0.144335 -0.773807 -0.554502
+0.260014 0.161687 -0.735884 -0.603924
+0.248882 0.178347 -0.69481 -0.65076
+0.236685 0.194242 -0.65076 -0.69481
+0.223474 0.209307 -0.603924 -0.735884
+0.209307 0.223474 -0.554502 -0.773807
+0.194242 0.236685 -0.502706 -0.808416
+0.178347 0.248882 -0.448756 -0.839564
+0.161687 0.260014 -0.392885 -0.867117
+0.144335 0.270032 -0.335332 -0.890956
+0.126365 0.278894 -0.276343 -0.91098
+0.107854 0.286562 -0.21617 -0.927103
+0.0888812 0.293002 -0.155072 -0.939256
+0.0695276 0.298188 -0.0933095 -0.947388
+0.0498763 0.302097 -0.0311475 -0.951462
+0.0300115 0.304712 0.0311478 -0.951462
+0.0100181 0.306022 0.0933096 -0.947388
+-0.0100182 0.306022 0.155072 -0.939256
+-0.0300115 0.304712 0.21617 -0.927103
+-0.0498764 0.302097 0.276343 -0.91098
+-0.0695276 0.298188 0.335332 -0.890956
+-0.0888812 0.293002 0.392886 -0.867116
+-0.107854 0.286561 0.448756 -0.839564
+-0.126365 0.278894 0.502706 -0.808416
+-0.144335 0.270032 0.554502 -0.773807
+-0.161687 0.260014 0.603924 -0.735884
+-0.178347 0.248882 0.650761 -0.69481
+-0.194242 0.236685 0.69481 -0.650761
+-0.209307 0.223474 0.735884 -0.603924
+-0.223474 0.209307 0.773807 -0.554502
+-0.236685 0.194242 0.808416 -0.502706
+-0.248882 0.178347 0.839564 -0.448756
+-0.260014 0.161687 0.867117 -0.392885
+-0.270032 0.144335 0.890956 -0.335332
+-0.278894 0.126365 0.91098 -0.276343
+-0.286562 0.107854 0.927103 -0.21617
+-0.293002 0.0888812 0.939256 -0.155072
+-0.298188 0.0695276 0.947388 -0.0933095
+-0.302097 0.0498764 0.951462 -0.0311477
+-0.304712 0.0300115 0.951462 0.0311477
+-0.306022 0.0100182 0.947388 0.0933095
+0.357026 0.0116878 -0.871042 -0.337168
+0.355497 0.0350134 -0.847125 -0.393415
+0.352446 0.0581891 -0.819581 -0.447977
+0.347886 0.0811156 -0.788527 -0.500621
+0.341836 0.103695 -0.754096 -0.551121
+0.334322 0.12583 -0.716437 -0.599262
+0.325376 0.147426 -0.675709 -0.644836
+0.315037 0.168391 -0.632088 -0.687649
+0.30335 0.188635 -0.58576 -0.727517
+0.290363 0.208071 -0.536924 -0.76427
+0.276133 0.226616 -0.485789 -0.79775
+0.26072 0.244191 -0.432574 -0.827814
+0.244191 0.26072 -0.377506 -0.854333
+0.226616 0.276133 -0.320821 -0.877194
+0.208071 0.290363 -0.262763 -0.896299
+0.188635 0.30335 -0.20358 -0.911565
+0.168391 0.315038 -0.143525 -0.922928
+0.147426 0.325376 -0.0828551 -0.930339
+0.12583 0.334322 -0.0218307 -0.933766
+0.103695 0.341836 0.0392873 -0.933195
+0.0811155 0.347886 0.100237 -0.928627
+0.058189 0.352446 0.160758 -0.920083
+0.0350134 0.355497 0.22059 -0.907599
+0.0116878 0.357026 0.279477 -0.891229
+-0.0116879 0.357026 0.337168 -0.871042
+-0.0350134 0.355497 0.393415 -0.847125
+-0.0581891 0.352446 0.447977 -0.819581
+-0.0811156 0.347886 0.500621 -0.788527
+-0.103695 0.341836 0.551121 -0.754096
+-0.12583 0.334322 0.599262 -0.716436
+-0.147426 0.325376 0.644836 -0.675709
+-0.168391 0.315038 0.687649 -0.632088
+-0.188635 0.30335 0.727517 -0.58576
+-0.208071 0.290363 0.76427 -0.536924
+-0.226616 0.276133 0.79775 -0.485789
+-0.244191 0.26072 0.827814 -0.432574
+-0.26072 0.244191 0.854333 -0.377506
+-0.276133 0.226616 0.877194 -0.320821
+-0.290363 0.208071 0.896299 -0.262763
+-0.30335 0.188635 0.911565 -0.20358
+-0.315037 0.168391 0.922928 -0.143525
+-0.325376 0.147426 0.930339 -0.0828552
+-0.334322 0.12583 0.933766 -0.0218306
+-0.341836 0.103695 0.933195 0.0392872
+-0.347886 0.0811155 0.928627 0.100237
+-0.352446 0.0581891 0.920083 0.160757
+-0.355497 0.0350134 0.907599 0.22059
+-0.357026 0.0116879 0.891229 0.279477
+0.357026 0.0116878 -0.774176 -0.522539
+0.355497 0.0350134 -0.738343 -0.572054
+0.352446 0.0581891 -0.699348 -0.619119
+0.347886 0.0811156 -0.657358 -0.663533
+0.341836 0.103695 -0.612553 -0.705106
+0.334322 0.12583 -0.565126 -0.743659
+0.325376 0.147426 -0.515278 -0.779028
+0.315037 0.168391 -0.463224 -0.811061
+0.30335 0.188635 -0.409186 -0.83962
+0.290363 0.208071 -0.353396 -0.864585
+0.276133 0.226616 -0.296093 -0.885847
+0.26072 0.244191 -0.237522 -0.903316
+0.244191 0.26072 -0.177934 -0.916916
+0.226616 0.276133 -0.117584 -0.92659
+0.208071 0.290363 -0.05673 -0.932297
+0.188635 0.30335 0.00436661 -0.934011
+0.168391 0.315038 0.0654445 -0.931726
+0.147426 0.325376 0.126242 -0.925451
+0.12583 0.334322 0.186499 -0.915212
+0.103695 0.341836 0.245958 -0.901055
+0.0811155 0.347886 0.304363 -0.88304
+0.058189 0.352446 0.361465 -0.861243
+0.0350134 0.355497 0.417019 -0.835758
+0.0116878 0.357026 0.470787 -0.806694
+-0.0116879 0.357026 0.522539 -0.774176
+-0.0350134 0.355497 0.572054 -0.738343
+-0.0581891 0.352446 0.619119 -0.699348
+-0.0811156 0.347886 0.663533 -0.657358
+-0.103695 0.341836 0.705106 -0.612553
+-0.12583 0.334322 0.743659 -0.565126
+-0.147426 0.325376 0.779028 -0.515278
+-0.168391 0.315038 0.811061 -0.463224
+-0.188635 0.30335 0.83962 -0.409186
+-0.208071 0.290363 0.864585 -0.353396
+-0.226616 0.276133 0.885847 -0.296093
+-0.244191 0.26072 0.903316 -0.237522
+-0.26072 0.244191 0.916916 -0.177934
+-0.276133 0.226616 0.92659 -0.117584
+-0.290363 0.208071 0.932297 -0.0567302
+-0.30335 0.188635 0.934011 0.00436654
+-0.315037 0.168391 0.931726 0.0654443
+-0.325376 0.147426 0.925451 0.126242
+-0.334322 0.12583 0.915212 0.186499
+-0.341836 0.103695 0.901055 0.245958
+-0.347886 0.0811155 0.88304 0.304363
+-0.352446 0.0581891 0.861243 0.361465
+-0.355497 0.0350134 0.835758 0.417019
+-0.357026 0.0116879 0.806694 0.470787
+0.40803 0.0133575 -0.686333 -0.601898
+0.406282 0.0400153 -0.645497 -0.645497
+0.402795 0.0665018 -0.601898 -0.686333
+0.397584 0.0927035 -0.555721 -0.724229
+0.390669 0.118508 -0.507164 -0.759024
+0.382082 0.143805 -0.456435 -0.790569
+0.371859 0.168487 -0.403752 -0.818729
+0.360043 0.192447 -0.349341 -0.843383
+0.346685 0.215583 -0.293433 -0.864425
+0.331843 0.237796 -0.236268 -0.881766
+0.31558 0.25899 -0.178092 -0.89533
+0.297966 0.279075 -0.119154 -0.905061
+0.279075 0.297966 -0.0597046 -0.910916
+0.25899 0.31558 2.0411e-09 -0.912871
+0.237796 0.331843 0.0597047 -0.910916
+0.215583 0.346685 0.119154 -0.905061
+0.192447 0.360043 0.178092 -0.89533
+0.168487 0.371859 0.236268 -0.881766
+0.143805 0.382082 0.293433 -0.864425
+0.118508 0.390669 0.349341 -0.843383
+0.0927035 0.397584 0.403753 -0.818729
+0.0665017 0.402795 0.456436 -0.790569
+0.0400153 0.406282 0.507164 -0.759024
+0.0133575 0.40803 0.555721 -0.724229
+-0.0133575 0.40803 0.601898 -0.686333
+-0.0400154 0.406282 0.645497 -0.645497
+-0.0665018 0.402795 0.686333 -0.601898
+-0.0927035 0.397584 0.724229 -0.555721
+-0.118508 0.390669 0.759025 -0.507164
+-0.143806 0.382082 0.790569 -0.456435
+-0.168487 0.371859 0.818729 -0.403752
+-0.192447 0.360043 0.843383 -0.349341
+-0.215583 0.346685 0.864425 -0.293433
+-0.237796 0.331843 0.881766 -0.236268
+-0.25899 0.31558 0.89533 -0.178092
+-0.279075 0.297966 0.905061 -0.119154
+-0.297966 0.279075 0.910916 -0.0597047
+-0.31558 0.25899 0.912871 4.1944e-08
+-0.331843 0.237796 0.910916 0.0597045
+-0.346685 0.215583 0.905061 0.119154
+-0.360043 0.192447 0.89533 0.178092
+-0.371859 0.168487 0.881766 0.236268
+-0.382082 0.143805 0.864425 0.293433
+-0.390669 0.118508 0.843383 0.349341
+-0.397584 0.0927035 0.818729 0.403753
+-0.402795 0.0665018 0.790569 0.456435
+-0.406282 0.0400153 0.759024 0.507164
+-0.40803 0.0133575 0.724229 0.555721
+0.40803 0.0133575 -0.790569 -0.456435
+0.406282 0.0400153 -0.759024 -0.507164
+0.402795 0.0665018 -0.724229 -0.555721
+0.397584 0.0927035 -0.686333 -0.601898
+0.390669 0.118508 -0.645497 -0.645497
+0.382082 0.143805 -0.601898 -0.686333
+0.371859 0.168487 -0.555721 -0.724229
+0.360043 0.192447 -0.507164 -0.759024
+0.346685 0.215583 -0.456435 -0.790569
+0.331843 0.237796 -0.403752 -0.818729
+0.31558 0.25899 -0.349341 -0.843383
+0.297966 0.279075 -0.293433 -0.864425
+0.279075 0.297966 -0.236268 -0.881766
+0.25899 0.31558 -0.178092 -0.89533
+0.237796 0.331843 -0.119154 -0.905061
+0.215583 0.346685 -0.0597045 -0.910916
+0.192447 0.360043 7.50431e-08 -0.912871
+0.168487 0.371859 0.0597047 -0.910916
+0.143805 0.382082 0.119154 -0.905061
+0.118508 0.390669 0.178092 -0.89533
+0.0927035 0.397584 0.236268 -0.881766
+0.0665017 0.402795 0.293433 -0.864425
+0.0400153 0.406282 0.349341 -0.843383
+0.0133575 0.40803 0.403753 -0.818729
+-0.0133575 0.40803 0.456436 -0.790569
+-0.0400154 0.406282 0.507164 -0.759024
+-0.0665018 0.402795 0.555721 -0.724229
+-0.0927035 0.397584 0.601898 -0.686333
+-0.118508 0.390669 0.645497 -0.645497
+-0.143806 0.382082 0.686333 -0.601898
+-0.168487 0.371859 0.724229 -0.555721
+-0.192447 0.360043 0.759024 -0.507164
+-0.215583 0.346685 0.790569 -0.456435
+-0.237796 0.331843 0.818729 -0.403752
+-0.25899 0.31558 0.843383 -0.349341
+-0.279075 0.297966 0.864425 -0.293433
+-0.297966 0.279075 0.881766 -0.236268
+-0.31558 0.25899 0.89533 -0.178092
+-0.331843 0.237796 0.905061 -0.119154
+-0.346685 0.215583 0.910916 -0.0597046
+-0.360043 0.192447 0.912871 -1.02699e-07
+-0.371859 0.168487 0.910916 0.0597046
+-0.382082 0.143805 0.905061 0.119154
+-0.390669 0.118508 0.89533 0.178092
+-0.397584 0.0927035 0.881766 0.236268
+-0.402795 0.0665018 0.864425 0.293433
+-0.406282 0.0400153 0.843383 0.349341
+-0.40803 0.0133575 0.818729 0.403752
+0.456191 0.0149342 -0.723236 -0.518263
+0.454238 0.0447385 -0.687791 -0.564456
+0.450339 0.0743513 -0.649401 -0.608231
+0.444512 0.103646 -0.608231 -0.649401
+0.436782 0.132496 -0.564456 -0.687791
+0.427181 0.160779 -0.518263 -0.723236
+0.415751 0.188374 -0.469852 -0.755583
+0.40254 0.215162 -0.419428 -0.784695
+0.387606 0.241029 -0.367209 -0.810447
+0.371012 0.265863 -0.313417 -0.832728
+0.352829 0.28956 -0.258283 -0.851444
+0.333136 0.312016 -0.202043 -0.866513
+0.312016 0.333136 -0.144937 -0.877872
+0.28956 0.352829 -0.0872114 -0.885472
+0.265863 0.371012 -0.029112 -0.88928
+0.241029 0.387606 0.0291121 -0.88928
+0.215162 0.40254 0.0872115 -0.885472
+0.188374 0.415751 0.144937 -0.877872
+0.160779 0.427181 0.202043 -0.866513
+0.132496 0.436782 0.258283 -0.851444
+0.103646 0.444512 0.313417 -0.832728
+0.0743512 0.450339 0.367209 -0.810447
+0.0447384 0.454238 0.419428 -0.784695
+0.0149341 0.456191 0.469852 -0.755583
+-0.0149342 0.456191 0.518263 -0.723236
+-0.0447385 0.454238 0.564456 -0.687791
+-0.0743513 0.450339 0.608231 -0.649401
+-0.103646 0.444512 0.649401 -0.608231
+-0.132496 0.436781 0.687791 -0.564455
+-0.160779 0.427181 0.723236 -0.518263
+-0.188374 0.415751 0.755583 -0.469852
+-0.215162 0.40254 0.784695 -0.419428
+-0.241029 0.387606 0.810447 -0.367209
+-0.265864 0.371012 0.832728 -0.313417
+-0.28956 0.352829 0.851444 -0.258283
+-0.312016 0.333136 0.866513 -0.202043
+-0.333136 0.312016 0.877872 -0.144937
+-0.352829 0.28956 0.885472 -0.0872113
+-0.371012 0.265864 0.88928 -0.0291121
+-0.387606 0.241029 0.88928 0.029112
+-0.40254 0.215162 0.885472 0.0872113
+-0.415751 0.188374 0.877872 0.144937
+-0.427181 0.160779 0.866513 0.202043
+-0.436782 0.132496 0.851444 0.258283
+-0.444512 0.103646 0.832728 0.313417
+-0.450339 0.0743513 0.810447 0.367209
+-0.454238 0.0447385 0.784695 0.419428
+-0.456191 0.0149342 0.755583 0.469852
+0.357026 0.0116878 -0.92423 -0.134889
+0.355497 0.0350134 -0.913429 -0.195048
+0.352446 0.0581891 -0.898716 -0.254372
+0.347886 0.0811156 -0.880155 -0.312606
+0.341836 0.103695 -0.857825 -0.369501
+0.334322 0.12583 -0.831822 -0.424815
+0.325376 0.147426 -0.802257 -0.478309
+0.315037 0.168391 -0.769256 -0.529755
+0.30335 0.188635 -0.732962 -0.578933
+0.290363 0.208071 -0.693528 -0.625631
+0.276133 0.226616 -0.651125 -0.66965
+0.26072 0.244191 -0.605934 -0.710802
+0.244191 0.26072 -0.558148 -0.74891
+0.226616 0.276133 -0.507972 -0.783811
+0.208071 0.290363 -0.45562 -0.815356
+0.188635 0.30335 -0.401318 -0.84341
+0.168391 0.315038 -0.345297 -0.867851
+0.147426 0.325376 -0.287798 -0.888577
+0.12583 0.334322 -0.229066 -0.905497
+0.103695 0.341836 -0.169353 -0.91854
+0.0811155 0.347886 -0.108915 -0.927649
+0.058189 0.352446 -0.0480106 -0.932787
+0.0350134 0.355497 0.0130993 -0.933929
+0.0116878 0.357026 0.0741532 -0.931073
+-0.0116879 0.357026 0.134889 -0.92423
+-0.0350134 0.355497 0.195048 -0.913429
+-0.0581891 0.352446 0.254372 -0.898716
+-0.0811156 0.347886 0.312606 -0.880155
+-0.103695 0.341836 0.369502 -0.857825
+-0.12583 0.334322 0.424815 -0.831822
+-0.147426 0.325376 0.478309 -0.802257
+-0.168391 0.315038 0.529755 -0.769257
+-0.188635 0.30335 0.578933 -0.732962
+-0.208071 0.290363 0.625631 -0.693528
+-0.226616 0.276133 0.66965 -0.651125
+-0.244191 0.26072 0.710802 -0.605934
+-0.26072 0.244191 0.74891 -0.558148
+-0.276133 0.226616 0.783812 -0.507972
+-0.290363 0.208071 0.815356 -0.455621
+-0.30335 0.188635 0.84341 -0.401318
+-0.315037 0.168391 0.867851 -0.345297
+-0.325376 0.147426 0.888577 -0.287798
+-0.334322 0.12583 0.905497 -0.229066
+-0.341836 0.103695 0.91854 -0.169353
+-0.347886 0.0811155 0.927649 -0.108915
+-0.352446 0.0581891 0.932787 -0.0480108
+-0.355497 0.0350134 0.933929 0.0130992
+-0.357026 0.0116879 0.931073 0.074153
+0.40803 0.0133575 -0.864425 -0.293433
+0.406282 0.0400153 -0.843383 -0.349341
+0.402795 0.0665018 -0.818729 -0.403752
+0.397584 0.0927035 -0.790569 -0.456435
+0.390669 0.118508 -0.759024 -0.507164
+0.382082 0.143805 -0.724229 -0.555721
+0.371859 0.168487 -0.686333 -0.601898
+0.360043 0.192447 -0.645497 -0.645497
+0.346685 0.215583 -0.601898 -0.686333
+0.331843 0.237796 -0.555721 -0.724229
+0.31558 0.25899 -0.507164 -0.759024
+0.297966 0.279075 -0.456435 -0.790569
+0.279075 0.297966 -0.403752 -0.818729
+0.25899 0.31558 -0.349341 -0.843383
+0.237796 0.331843 -0.293433 -0.864425
+0.215583 0.346685 -0.236268 -0.881766
+0.192447 0.360043 -0.178092 -0.89533
+0.168487 0.371859 -0.119154 -0.905061
+0.143805 0.382082 -0.0597046 -0.910916
+0.118508 0.390669 3.92225e-08 -0.912871
+0.0927035 0.397584 0.0597046 -0.910916
+0.0665017 0.402795 0.119154 -0.905061
+0.0400153 0.406282 0.178092 -0.89533
+0.0133575 0.40803 0.236268 -0.881766
+-0.0133575 0.40803 0.293433 -0.864425
+-0.0400154 0.406282 0.349341 -0.843383
+-0.0665018 0.402795 0.403753 -0.818729
+-0.0927035 0.397584 0.456436 -0.790569
+-0.118508 0.390669 0.507164 -0.759024
+-0.143806 0.382082 0.555721 -0.724229
+-0.168487 0.371859 0.601898 -0.686333
+-0.192447 0.360043 0.645497 -0.645497
+-0.215583 0.346685 0.686333 -0.601898
+-0.237796 0.331843 0.724229 -0.555721
+-0.25899 0.31558 0.759024 -0.507164
+-0.279075 0.297966 0.790569 -0.456435
+-0.297966 0.279075 0.818729 -0.403753
+-0.31558 0.25899 0.843383 -0.349341
+-0.331843 0.237796 0.864425 -0.293433
+-0.346685 0.215583 0.881766 -0.236268
+-0.360043 0.192447 0.89533 -0.178092
+-0.371859 0.168487 0.905061 -0.119154
+-0.382082 0.143805 0.910916 -0.0597045
+-0.390669 0.118508 0.912871 -2.96973e-08
+-0.397584 0.0927035 0.910916 0.0597047
+-0.402795 0.0665018 0.905061 0.119154
+-0.406282 0.0400153 0.89533 0.178092
+-0.40803 0.0133575 0.881766 0.236268
+0.40803 0.0133575 -0.905061 -0.119154
+0.406282 0.0400153 -0.89533 -0.178092
+0.402795 0.0665018 -0.881766 -0.236268
+0.397584 0.0927035 -0.864425 -0.293433
+0.390669 0.118508 -0.843383 -0.349341
+0.382082 0.143805 -0.818729 -0.403752
+0.371859 0.168487 -0.790569 -0.456435
+0.360043 0.192447 -0.759024 -0.507164
+0.346685 0.215583 -0.724229 -0.555721
+0.331843 0.237796 -0.686333 -0.601898
+0.31558 0.25899 -0.645497 -0.645497
+0.297966 0.279075 -0.601898 -0.686333
+0.279075 0.297966 -0.555721 -0.724229
+0.25899 0.31558 -0.507164 -0.759024
+0.237796 0.331843 -0.456435 -0.790569
+0.215583 0.346685 -0.403752 -0.818729
+0.192447 0.360043 -0.349341 -0.843383
+0.168487 0.371859 -0.293433 -0.864425
+0.143805 0.382082 -0.236268 -0.881766
+0.118508 0.390669 -0.178092 -0.89533
+0.0927035 0.397584 -0.119154 -0.905061
+0.0665017 0.402795 -0.0597045 -0.910916
+0.0400153 0.406282 1.12225e-07 -0.912871
+0.0133575 0.40803 0.0597047 -0.910916
+-0.0133575 0.40803 0.119154 -0.905061
+-0.0400154 0.406282 0.178092 -0.89533
+-0.0665018 0.402795 0.236268 -0.881766
+-0.0927035 0.397584 0.293433 -0.864425
+-0.118508 0.390669 0.349341 -0.843383
+-0.143806 0.382082 0.403753 -0.818729
+-0.168487 0.371859 0.456436 -0.790569
+-0.192447 0.360043 0.507164 -0.759024
+-0.215583 0.346685 0.555721 -0.724229
+-0.237796 0.331843 0.601898 -0.686333
+-0.25899 0.31558 0.645497 -0.645497
+-0.279075 0.297966 0.686333 -0.601898
+-0.297966 0.279075 0.724229 -0.555721
+-0.31558 0.25899 0.759024 -0.507164
+-0.331843 0.237796 0.790569 -0.456436
+-0.346685 0.215583 0.818729 -0.403752
+-0.360043 0.192447 0.843383 -0.349341
+-0.371859 0.168487 0.864425 -0.293433
+-0.382082 0.143805 0.881766 -0.236268
+-0.390669 0.118508 0.89533 -0.178092
+-0.397584 0.0927035 0.905061 -0.119153
+-0.402795 0.0665018 0.910916 -0.0597047
+-0.406282 0.0400153 0.912871 4.33047e-08
+-0.40803 0.0133575 0.910916 0.0597045
+0.456191 0.0149342 -0.866513 -0.202043
+0.454238 0.0447385 -0.851444 -0.258283
+0.450339 0.0743513 -0.832728 -0.313417
+0.444512 0.103646 -0.810447 -0.367209
+0.436782 0.132496 -0.784695 -0.419428
+0.427181 0.160779 -0.755583 -0.469852
+0.415751 0.188374 -0.723236 -0.518263
+0.40254 0.215162 -0.687791 -0.564456
+0.387606 0.241029 -0.649401 -0.608231
+0.371012 0.265863 -0.608231 -0.649401
+0.352829 0.28956 -0.564456 -0.687791
+0.333136 0.312016 -0.518263 -0.723236
+0.312016 0.333136 -0.469852 -0.755583
+0.28956 0.352829 -0.419428 -0.784695
+0.265863 0.371012 -0.367209 -0.810447
+0.241029 0.387606 -0.313417 -0.832728
+0.215162 0.40254 -0.258283 -0.851444
+0.188374 0.415751 -0.202043 -0.866513
+0.160779 0.427181 -0.144937 -0.877872
+0.132496 0.436782 -0.0872114 -0.885472
+0.103646 0.444512 -0.029112 -0.88928
+0.0743512 0.450339 0.0291121 -0.88928
+0.0447384 0.454238 0.0872115 -0.885472
+0.0149341 0.456191 0.144937 -0.877872
+-0.0149342 0.456191 0.202043 -0.866513
+-0.0447385 0.454238 0.258283 -0.851444
+-0.0743513 0.450339 0.313417 -0.832728
+-0.103646 0.444512 0.367209 -0.810447
+-0.132496 0.436781 0.419428 -0.784695
+-0.160779 0.427181 0.469852 -0.755583
+-0.188374 0.415751 0.518263 -0.723236
+-0.215162 0.40254 0.564455 -0.687791
+-0.241029 0.387606 0.608231 -0.649401
+-0.265864 0.371012 0.649401 -0.608231
+-0.28956 0.352829 0.687791 -0.564456
+-0.312016 0.333136 0.723236 -0.518263
+-0.333136 0.312016 0.755583 -0.469852
+-0.352829 0.28956 0.784695 -0.419428
+-0.371012 0.265864 0.810447 -0.367209
+-0.387606 0.241029 0.832728 -0.313417
+-0.40254 0.215162 0.851444 -0.258283
+-0.415751 0.188374 0.866513 -0.202043
+-0.427181 0.160779 0.877872 -0.144937
+-0.436782 0.132496 0.885472 -0.0872114
+-0.444512 0.103646 0.88928 -0.029112
+-0.450339 0.0743513 0.88928 0.029112
+-0.454238 0.0447385 0.885472 0.0872114
+-0.456191 0.0149342 0.877872 0.144937
+0.456191 0.0149342 -0.810447 -0.367209
+0.454238 0.0447385 -0.784695 -0.419428
+0.450339 0.0743513 -0.755583 -0.469852
+0.444512 0.103646 -0.723236 -0.518263
+0.436782 0.132496 -0.687791 -0.564456
+0.427181 0.160779 -0.649401 -0.608231
+0.415751 0.188374 -0.608231 -0.649401
+0.40254 0.215162 -0.564456 -0.687791
+0.387606 0.241029 -0.518263 -0.723236
+0.371012 0.265863 -0.469852 -0.755583
+0.352829 0.28956 -0.419428 -0.784695
+0.333136 0.312016 -0.367209 -0.810447
+0.312016 0.333136 -0.313417 -0.832728
+0.28956 0.352829 -0.258283 -0.851444
+0.265863 0.371012 -0.202043 -0.866513
+0.241029 0.387606 -0.144937 -0.877872
+0.215162 0.40254 -0.0872113 -0.885472
+0.188374 0.415751 -0.029112 -0.88928
+0.160779 0.427181 0.0291121 -0.88928
+0.132496 0.436782 0.0872114 -0.885472
+0.103646 0.444512 0.144937 -0.877872
+0.0743512 0.450339 0.202043 -0.866513
+0.0447384 0.454238 0.258283 -0.851444
+0.0149341 0.456191 0.313417 -0.832728
+-0.0149342 0.456191 0.367209 -0.810447
+-0.0447385 0.454238 0.419428 -0.784695
+-0.0743513 0.450339 0.469852 -0.755583
+-0.103646 0.444512 0.518263 -0.723236
+-0.132496 0.436781 0.564456 -0.687791
+-0.160779 0.427181 0.608231 -0.649401
+-0.188374 0.415751 0.649401 -0.608231
+-0.215162 0.40254 0.687791 -0.564456
+-0.241029 0.387606 0.723236 -0.518263
+-0.265864 0.371012 0.755583 -0.469852
+-0.28956 0.352829 0.784695 -0.419428
+-0.312016 0.333136 0.810447 -0.367209
+-0.333136 0.312016 0.832728 -0.313417
+-0.352829 0.28956 0.851444 -0.258283
+-0.371012 0.265864 0.866513 -0.202043
+-0.387606 0.241029 0.877872 -0.144937
+-0.40254 0.215162 0.885472 -0.0872115
+-0.415751 0.188374 0.88928 -0.029112
+-0.427181 0.160779 0.88928 0.0291121
+-0.436782 0.132496 0.885472 0.0872114
+-0.444512 0.103646 0.877872 0.144937
+-0.450339 0.0743513 0.866513 0.202043
+-0.454238 0.0447385 0.851444 0.258283
+-0.456191 0.0149342 0.832728 0.313417
+0.499732 0.0163595 -0.75 -0.433013
+0.497592 0.0490086 -0.720074 -0.481138
+0.493322 0.0814477 -0.687064 -0.527203
+0.486938 0.113538 -0.651112 -0.57101
+0.47847 0.145142 -0.612372 -0.612372
+0.467953 0.176125 -0.57101 -0.651112
+0.455432 0.206354 -0.527203 -0.687064
+0.440961 0.235698 -0.481138 -0.720074
+0.424601 0.264034 -0.433013 -0.75
+0.406423 0.291239 -0.383033 -0.776715
+0.386505 0.317197 -0.331414 -0.800103
+0.364932 0.341796 -0.278375 -0.820066
+0.341796 0.364932 -0.224144 -0.836516
+0.317197 0.386505 -0.168953 -0.849385
+0.291239 0.406423 -0.113039 -0.858616
+0.264034 0.424601 -0.0566407 -0.864171
+0.235698 0.440961 7.11922e-08 -0.866025
+0.206353 0.455432 0.0566408 -0.864171
+0.176125 0.467953 0.113039 -0.858616
+0.145142 0.47847 0.168953 -0.849385
+0.113538 0.486938 0.224144 -0.836516
+0.0814477 0.493322 0.278375 -0.820066
+0.0490085 0.497592 0.331414 -0.800103
+0.0163595 0.499732 0.383033 -0.776715
+-0.0163596 0.499732 0.433013 -0.75
+-0.0490086 0.497592 0.481138 -0.720074
+-0.0814478 0.493322 0.527203 -0.687064
+-0.113538 0.486938 0.57101 -0.651112
+-0.145142 0.47847 0.612373 -0.612372
+-0.176125 0.467953 0.651112 -0.57101
+-0.206354 0.455432 0.687064 -0.527203
+-0.235698 0.440961 0.720074 -0.481138
+-0.264034 0.424601 0.75 -0.433013
+-0.291239 0.406423 0.776715 -0.383033
+-0.317197 0.386505 0.800103 -0.331414
+-0.341796 0.364932 0.820066 -0.278375
+-0.364932 0.341796 0.836516 -0.224144
+-0.386505 0.317197 0.849385 -0.168953
+-0.406423 0.291239 0.858616 -0.113039
+-0.424601 0.264034 0.864171 -0.0566408
+-0.440961 0.235698 0.866025 -9.74292e-08
+-0.455432 0.206354 0.864171 0.0566408
+-0.467953 0.176125 0.858616 0.113039
+-0.47847 0.145142 0.849385 0.168953
+-0.486938 0.113538 0.836516 0.224144
+-0.493322 0.0814478 0.820066 0.278375
+-0.497592 0.0490085 0.800103 0.331414
+-0.499732 0.0163596 0.776715 0.383033
+0.499732 0.0163595 -0.820066 -0.278375
+0.497592 0.0490086 -0.800103 -0.331414
+0.493322 0.0814477 -0.776715 -0.383033
+0.486938 0.113538 -0.75 -0.433013
+0.47847 0.145142 -0.720074 -0.481138
+0.467953 0.176125 -0.687064 -0.527203
+0.455432 0.206354 -0.651112 -0.57101
+0.440961 0.235698 -0.612372 -0.612372
+0.424601 0.264034 -0.57101 -0.651112
+0.406423 0.291239 -0.527203 -0.687064
+0.386505 0.317197 -0.481138 -0.720074
+0.364932 0.341796 -0.433013 -0.75
+0.341796 0.364932 -0.383033 -0.776715
+0.317197 0.386505 -0.331414 -0.800103
+0.291239 0.406423 -0.278375 -0.820066
+0.264034 0.424601 -0.224144 -0.836516
+0.235698 0.440961 -0.168953 -0.849385
+0.206353 0.455432 -0.113039 -0.858616
+0.176125 0.467953 -0.0566407 -0.864171
+0.145142 0.47847 3.72097e-08 -0.866025
+0.113538 0.486938 0.0566408 -0.864171
+0.0814477 0.493322 0.113039 -0.858616
+0.0490085 0.497592 0.168953 -0.849385
+0.0163595 0.499732 0.224144 -0.836516
+-0.0163596 0.499732 0.278375 -0.820066
+-0.0490086 0.497592 0.331414 -0.800103
+-0.0814478 0.493322 0.383033 -0.776715
+-0.113538 0.486938 0.433013 -0.75
+-0.145142 0.47847 0.481138 -0.720074
+-0.176125 0.467953 0.527203 -0.687064
+-0.206354 0.455432 0.57101 -0.651112
+-0.235698 0.440961 0.612372 -0.612373
+-0.264034 0.424601 0.651112 -0.57101
+-0.291239 0.406423 0.687064 -0.527203
+-0.317197 0.386505 0.720074 -0.481138
+-0.341796 0.364932 0.75 -0.433013
+-0.364932 0.341796 0.776715 -0.383033
+-0.386505 0.317197 0.800103 -0.331414
+-0.406423 0.291239 0.820066 -0.278375
+-0.424601 0.264034 0.836516 -0.224144
+-0.440961 0.235698 0.849385 -0.168953
+-0.455432 0.206354 0.858616 -0.113039
+-0.467953 0.176125 0.864171 -0.0566407
+-0.47847 0.145142 0.866025 -2.81734e-08
+-0.486938 0.113538 0.864171 0.0566408
+-0.493322 0.0814478 0.858616 0.113039
+-0.497592 0.0490085 0.849385 0.168953
+-0.499732 0.0163596 0.836516 0.224144
+0.539773 0.0176703 -0.766606 -0.347345
+0.537461 0.0529353 -0.742247 -0.396739
+0.532848 0.0879736 -0.71471 -0.444435
+0.525954 0.122635 -0.684112 -0.490228
+0.516807 0.156772 -0.650585 -0.533922
+0.505447 0.190237 -0.614272 -0.575329
+0.491923 0.222887 -0.575329 -0.614272
+0.476292 0.254583 -0.533921 -0.650585
+0.458622 0.285189 -0.490228 -0.684112
+0.438987 0.314574 -0.444435 -0.71471
+0.417473 0.342612 -0.396739 -0.742247
+0.394172 0.369182 -0.347345 -0.766606
+0.369182 0.394172 -0.296463 -0.787682
+0.342612 0.417473 -0.244311 -0.805385
+0.314574 0.438987 -0.191113 -0.81964
+0.285189 0.458622 -0.137097 -0.830384
+0.254583 0.476292 -0.0824936 -0.837573
+0.222887 0.491923 -0.0275372 -0.841175
+0.190237 0.505447 0.0275373 -0.841175
+0.156772 0.516807 0.0824938 -0.837573
+0.122635 0.525954 0.137097 -0.830384
+0.0879735 0.532848 0.191113 -0.81964
+0.0529352 0.537461 0.244311 -0.805385
+0.0176703 0.539773 0.296463 -0.787682
+-0.0176704 0.539773 0.347345 -0.766606
+-0.0529354 0.537461 0.39674 -0.742247
+-0.0879736 0.532848 0.444435 -0.71471
+-0.122635 0.525954 0.490228 -0.684112
+-0.156772 0.516807 0.533922 -0.650585
+-0.190237 0.505447 0.575329 -0.614272
+-0.222887 0.491923 0.614272 -0.575329
+-0.254583 0.476292 0.650585 -0.533922
+-0.285189 0.458622 0.684112 -0.490228
+-0.314574 0.438987 0.71471 -0.444435
+-0.342612 0.417473 0.742247 -0.396739
+-0.369182 0.394172 0.766606 -0.347345
+-0.394172 0.369182 0.787682 -0.296463
+-0.417473 0.342612 0.805385 -0.244311
+-0.438987 0.314574 0.81964 -0.191113
+-0.458622 0.285189 0.830384 -0.137097
+-0.476292 0.254583 0.837573 -0.0824938
+-0.491923 0.222887 0.841175 -0.0275372
+-0.505447 0.190237 0.841175 0.0275373
+-0.516807 0.156772 0.837573 0.0824937
+-0.525954 0.122635 0.830384 0.137097
+-0.532848 0.0879736 0.81964 0.191113
+-0.537461 0.0529353 0.805385 0.244311
+-0.539773 0.0176704 0.787682 0.296463
+0.456191 0.0149342 -0.608231 -0.649401
+0.454238 0.0447385 -0.564456 -0.687791
+0.450339 0.0743513 -0.518263 -0.723236
+0.444512 0.103646 -0.469852 -0.755583
+0.436782 0.132496 -0.419428 -0.784695
+0.427181 0.160779 -0.367209 -0.810447
+0.415751 0.188374 -0.313417 -0.832728
+0.40254 0.215162 -0.258283 -0.851444
+0.387606 0.241029 -0.202043 -0.866513
+0.371012 0.265863 -0.144937 -0.877872
+0.352829 0.28956 -0.0872114 -0.885472
+0.333136 0.312016 -0.029112 -0.88928
+0.312016 0.333136 0.029112 -0.88928
+0.28956 0.352829 0.0872114 -0.885472
+0.265863 0.371012 0.144937 -0.877872
+0.241029 0.387606 0.202043 -0.866513
+0.215162 0.40254 0.258283 -0.851444
+0.188374 0.415751 0.313417 -0.832728
+0.160779 0.427181 0.367209 -0.810447
+0.132496 0.436782 0.419428 -0.784695
+0.103646 0.444512 0.469852 -0.755583
+0.0743512 0.450339 0.518263 -0.723236
+0.0447384 0.454238 0.564456 -0.687791
+0.0149341 0.456191 0.608231 -0.649401
+-0.0149342 0.456191 0.649401 -0.608231
+-0.0447385 0.454238 0.687791 -0.564456
+-0.0743513 0.450339 0.723236 -0.518263
+-0.103646 0.444512 0.755583 -0.469852
+-0.132496 0.436781 0.784695 -0.419428
+-0.160779 0.427181 0.810447 -0.367209
+-0.188374 0.415751 0.832728 -0.313417
+-0.215162 0.40254 0.851444 -0.258283
+-0.241029 0.387606 0.866513 -0.202043
+-0.265864 0.371012 0.877872 -0.144937
+-0.28956 0.352829 0.885472 -0.0872114
+-0.312016 0.333136 0.88928 -0.029112
+-0.333136 0.312016 0.88928 0.029112
+-0.352829 0.28956 0.885472 0.0872114
+-0.371012 0.265864 0.877872 0.144937
+-0.387606 0.241029 0.866513 0.202043
+-0.40254 0.215162 0.851444 0.258283
+-0.415751 0.188374 0.832728 0.313417
+-0.427181 0.160779 0.810447 0.367209
+-0.436782 0.132496 0.784695 0.419428
+-0.444512 0.103646 0.755583 0.469852
+-0.450339 0.0743513 0.723236 0.518263
+-0.454238 0.0447385 0.687791 0.564456
+-0.456191 0.0149342 0.649401 0.608231
+0.499732 0.0163595 -0.527203 -0.687064
+0.497592 0.0490086 -0.481138 -0.720074
+0.493322 0.0814477 -0.433013 -0.75
+0.486938 0.113538 -0.383033 -0.776715
+0.47847 0.145142 -0.331414 -0.800103
+0.467953 0.176125 -0.278375 -0.820066
+0.455432 0.206354 -0.224144 -0.836516
+0.440961 0.235698 -0.168953 -0.849385
+0.424601 0.264034 -0.113039 -0.858616
+0.406423 0.291239 -0.0566408 -0.864171
+0.386505 0.317197 -1.57003e-08 -0.866025
+0.364932 0.341796 0.0566408 -0.864171
+0.341796 0.364932 0.113039 -0.858616
+0.317197 0.386505 0.168953 -0.849385
+0.291239 0.406423 0.224144 -0.836516
+0.264034 0.424601 0.278375 -0.820066
+0.235698 0.440961 0.331414 -0.800103
+0.206353 0.455432 0.383033 -0.776715
+0.176125 0.467953 0.433013 -0.75
+0.145142 0.47847 0.481138 -0.720074
+0.113538 0.486938 0.527203 -0.687064
+0.0814477 0.493322 0.57101 -0.651112
+0.0490085 0.497592 0.612373 -0.612372
+0.0163595 0.499732 0.651112 -0.57101
+-0.0163596 0.499732 0.687064 -0.527203
+-0.0490086 0.497592 0.720074 -0.481138
+-0.0814478 0.493322 0.75 -0.433013
+-0.113538 0.486938 0.776715 -0.383033
+-0.145142 0.47847 0.800103 -0.331413
+-0.176125 0.467953 0.820066 -0.278375
+-0.206354 0.455432 0.836516 -0.224144
+-0.235698 0.440961 0.849385 -0.168953
+-0.264034 0.424601 0.858616 -0.113039
+-0.291239 0.406423 0.864171 -0.0566407
+-0.317197 0.386505 0.866025 -2.94643e-08
+-0.341796 0.364932 0.864171 0.0566408
+-0.364932 0.341796 0.858616 0.113039
+-0.386505 0.317197 0.849385 0.168953
+-0.406423 0.291239 0.836516 0.224144
+-0.424601 0.264034 0.820066 0.278375
+-0.440961 0.235698 0.800103 0.331413
+-0.455432 0.206354 0.776715 0.383033
+-0.467953 0.176125 0.75 0.433013
+-0.47847 0.145142 0.720074 0.481138
+-0.486938 0.113538 0.687064 0.527203
+-0.493322 0.0814478 0.651112 0.57101
+-0.497592 0.0490085 0.612372 0.612372
+-0.499732 0.0163596 0.57101 0.651112
+0.499732 0.0163595 -0.651112 -0.57101
+0.497592 0.0490086 -0.612372 -0.612372
+0.493322 0.0814477 -0.57101 -0.651112
+0.486938 0.113538 -0.527203 -0.687064
+0.47847 0.145142 -0.481138 -0.720074
+0.467953 0.176125 -0.433013 -0.75
+0.455432 0.206354 -0.383033 -0.776715
+0.440961 0.235698 -0.331414 -0.800103
+0.424601 0.264034 -0.278375 -0.820066
+0.406423 0.291239 -0.224144 -0.836516
+0.386505 0.317197 -0.168953 -0.849385
+0.364932 0.341796 -0.113039 -0.858616
+0.341796 0.364932 -0.0566408 -0.864171
+0.317197 0.386505 1.93636e-09 -0.866025
+0.291239 0.406423 0.0566408 -0.864171
+0.264034 0.424601 0.113039 -0.858616
+0.235698 0.440961 0.168953 -0.849385
+0.206353 0.455432 0.224144 -0.836516
+0.176125 0.467953 0.278375 -0.820066
+0.145142 0.47847 0.331414 -0.800103
+0.113538 0.486938 0.383033 -0.776715
+0.0814477 0.493322 0.433013 -0.75
+0.0490085 0.497592 0.481138 -0.720074
+0.0163595 0.499732 0.527203 -0.687064
+-0.0163596 0.499732 0.57101 -0.651112
+-0.0490086 0.497592 0.612372 -0.612372
+-0.0814478 0.493322 0.651112 -0.57101
+-0.113538 0.486938 0.687064 -0.527203
+-0.145142 0.47847 0.720074 -0.481138
+-0.176125 0.467953 0.75 -0.433013
+-0.206354 0.455432 0.776715 -0.383033
+-0.235698 0.440961 0.800103 -0.331414
+-0.264034 0.424601 0.820066 -0.278375
+-0.291239 0.406423 0.836516 -0.224144
+-0.317197 0.386505 0.849385 -0.168953
+-0.341796 0.364932 0.858616 -0.113039
+-0.364932 0.341796 0.864171 -0.0566408
+-0.386505 0.317197 0.866025 3.97915e-08
+-0.406423 0.291239 0.864171 0.0566407
+-0.424601 0.264034 0.858616 0.113039
+-0.440961 0.235698 0.849385 0.168953
+-0.455432 0.206354 0.836516 0.224144
+-0.467953 0.176125 0.820066 0.278375
+-0.47847 0.145142 0.800103 0.331414
+-0.486938 0.113538 0.776715 0.383033
+-0.493322 0.0814478 0.75 0.433013
+-0.497592 0.0490085 0.720074 0.481138
+-0.499732 0.0163596 0.687064 0.527203
+0.539773 0.0176703 -0.575329 -0.614272
+0.537461 0.0529353 -0.533922 -0.650585
+0.532848 0.0879736 -0.490228 -0.684112
+0.525954 0.122635 -0.444435 -0.71471
+0.516807 0.156772 -0.396739 -0.742247
+0.505447 0.190237 -0.347345 -0.766606
+0.491923 0.222887 -0.296463 -0.787682
+0.476292 0.254583 -0.244311 -0.805385
+0.458622 0.285189 -0.191113 -0.81964
+0.438987 0.314574 -0.137097 -0.830384
+0.417473 0.342612 -0.0824937 -0.837573
+0.394172 0.369182 -0.0275372 -0.841175
+0.369182 0.394172 0.0275372 -0.841175
+0.342612 0.417473 0.0824937 -0.837573
+0.314574 0.438987 0.137097 -0.830384
+0.285189 0.458622 0.191113 -0.81964
+0.254583 0.476292 0.244311 -0.805385
+0.222887 0.491923 0.296463 -0.787682
+0.190237 0.505447 0.347345 -0.766606
+0.156772 0.516807 0.39674 -0.742247
+0.122635 0.525954 0.444435 -0.71471
+0.0879735 0.532848 0.490228 -0.684112
+0.0529352 0.537461 0.533922 -0.650585
+0.0176703 0.539773 0.575329 -0.614272
+-0.0176704 0.539773 0.614272 -0.575329
+-0.0529354 0.537461 0.650585 -0.533921
+-0.0879736 0.532848 0.684112 -0.490228
+-0.122635 0.525954 0.71471 -0.444435
+-0.156772 0.516807 0.742247 -0.396739
+-0.190237 0.505447 0.766606 -0.347345
+-0.222887 0.491923 0.787682 -0.296462
+-0.254583 0.476292 0.805385 -0.244311
+-0.285189 0.458622 0.81964 -0.191113
+-0.314574 0.438987 0.830384 -0.137097
+-0.342612 0.417473 0.837573 -0.0824937
+-0.369182 0.394172 0.841175 -0.0275372
+-0.394172 0.369182 0.841175 0.0275372
+-0.417473 0.342612 0.837573 0.0824938
+-0.438987 0.314574 0.830384 0.137097
+-0.458622 0.285189 0.81964 0.191113
+-0.476292 0.254583 0.805385 0.244311
+-0.491923 0.222887 0.787682 0.296463
+-0.505447 0.190237 0.766606 0.347345
+-0.516807 0.156772 0.742247 0.396739
+-0.525954 0.122635 0.71471 0.444435
+-0.532848 0.0879736 0.684112 0.490228
+-0.537461 0.0529353 0.650585 0.533922
+-0.539773 0.0176704 0.614272 0.575329
+0.539773 0.0176703 -0.444435 -0.71471
+0.537461 0.0529353 -0.396739 -0.742247
+0.532848 0.0879736 -0.347345 -0.766606
+0.525954 0.122635 -0.296463 -0.787682
+0.516807 0.156772 -0.244311 -0.805385
+0.505447 0.190237 -0.191113 -0.81964
+0.491923 0.222887 -0.137097 -0.830384
+0.476292 0.254583 -0.0824937 -0.837573
+0.458622 0.285189 -0.0275372 -0.841175
+0.438987 0.314574 0.0275372 -0.841175
+0.417473 0.342612 0.0824937 -0.837573
+0.394172 0.369182 0.137097 -0.830384
+0.369182 0.394172 0.191113 -0.81964
+0.342612 0.417473 0.244311 -0.805385
+0.314574 0.438987 0.296463 -0.787682
+0.285189 0.458622 0.347345 -0.766606
+0.254583 0.476292 0.39674 -0.742247
+0.222887 0.491923 0.444435 -0.71471
+0.190237 0.505447 0.490228 -0.684112
+0.156772 0.516807 0.533922 -0.650585
+0.122635 0.525954 0.575329 -0.614272
+0.0879735 0.532848 0.614272 -0.575329
+0.0529352 0.537461 0.650585 -0.533921
+0.0176703 0.539773 0.684112 -0.490228
+-0.0176704 0.539773 0.71471 -0.444435
+-0.0529354 0.537461 0.742247 -0.396739
+-0.0879736 0.532848 0.766606 -0.347345
+-0.122635 0.525954 0.787682 -0.296463
+-0.156772 0.516807 0.805385 -0.244311
+-0.190237 0.505447 0.81964 -0.191113
+-0.222887 0.491923 0.830384 -0.137097
+-0.254583 0.476292 0.837573 -0.0824938
+-0.285189 0.458622 0.841175 -0.0275372
+-0.314574 0.438987 0.841175 0.0275373
+-0.342612 0.417473 0.837573 0.0824937
+-0.369182 0.394172 0.830384 0.137097
+-0.394172 0.369182 0.81964 0.191113
+-0.417473 0.342612 0.805385 0.244311
+-0.438987 0.314574 0.787682 0.296463
+-0.458622 0.285189 0.766606 0.347345
+-0.476292 0.254583 0.742247 0.396739
+-0.491923 0.222887 0.71471 0.444435
+-0.505447 0.190237 0.684112 0.490228
+-0.516807 0.156772 0.650585 0.533921
+-0.525954 0.122635 0.614272 0.575329
+-0.532848 0.0879736 0.575329 0.614272
+-0.537461 0.0529353 0.533921 0.650585
+-0.539773 0.0176704 0.490228 0.684112
+0.577041 0.0188904 -0.361127 -0.732294
+0.57457 0.0565902 -0.31246 -0.754344
+0.569639 0.0940477 -0.262454 -0.773165
+0.562268 0.131103 -0.211325 -0.788675
+0.55249 0.167596 -0.159291 -0.800808
+0.540346 0.203372 -0.106574 -0.809511
+0.525887 0.238277 -0.0534014 -0.814748
+0.509177 0.272161 1.72366e-08 -0.816497
+0.490287 0.30488 0.0534014 -0.814748
+0.469297 0.336294 0.106574 -0.809511
+0.446298 0.366267 0.159291 -0.800808
+0.421387 0.394672 0.211325 -0.788675
+0.394672 0.421387 0.262454 -0.773165
+0.366267 0.446298 0.31246 -0.754344
+0.336294 0.469297 0.361127 -0.732294
+0.30488 0.490287 0.408248 -0.707107
+0.272161 0.509178 0.453621 -0.678892
+0.238276 0.525887 0.497052 -0.64777
+0.203372 0.540346 0.538354 -0.613875
+0.167596 0.55249 0.57735 -0.57735
+0.131103 0.562268 0.613875 -0.538354
+0.0940477 0.569639 0.64777 -0.497052
+0.0565902 0.57457 0.678892 -0.453621
+0.0188903 0.577041 0.707107 -0.408248
+-0.0188904 0.577041 0.732294 -0.361127
+-0.0565903 0.57457 0.754345 -0.31246
+-0.0940478 0.569639 0.773165 -0.262454
+-0.131103 0.562268 0.788675 -0.211325
+-0.167596 0.55249 0.800808 -0.15929
+-0.203372 0.540346 0.809511 -0.106574
+-0.238277 0.525887 0.814748 -0.0534013
+-0.272161 0.509178 0.816497 -9.30742e-08
+-0.30488 0.490287 0.814748 0.0534014
+-0.336294 0.469297 0.809511 0.106574
+-0.366267 0.446298 0.800808 0.159291
+-0.394672 0.421387 0.788675 0.211325
+-0.421387 0.394672 0.773165 0.262454
+-0.446298 0.366267 0.754344 0.31246
+-0.469297 0.336294 0.732294 0.361127
+-0.490287 0.30488 0.707107 0.408248
+-0.509177 0.272161 0.678892 0.453621
+-0.525887 0.238277 0.64777 0.497052
+-0.540346 0.203372 0.613875 0.538354
+-0.55249 0.167596 0.57735 0.57735
+-0.562268 0.131103 0.538354 0.613875
+-0.569639 0.0940478 0.497052 0.64777
+-0.57457 0.0565902 0.453621 0.678892
+-0.577041 0.0188904 0.408248 0.707107
+0.577041 0.0188904 -0.497052 -0.64777
+0.57457 0.0565902 -0.453621 -0.678892
+0.569639 0.0940477 -0.408248 -0.707107
+0.562268 0.131103 -0.361127 -0.732294
+0.55249 0.167596 -0.31246 -0.754344
+0.540346 0.203372 -0.262454 -0.773165
+0.525887 0.238277 -0.211325 -0.788675
+0.509177 0.272161 -0.159291 -0.800808
+0.490287 0.30488 -0.106574 -0.809511
+0.469297 0.336294 -0.0534014 -0.814748
+0.446298 0.366267 -1.48024e-08 -0.816497
+0.421387 0.394672 0.0534015 -0.814748
+0.394672 0.421387 0.106574 -0.809511
+0.366267 0.446298 0.159291 -0.800808
+0.336294 0.469297 0.211325 -0.788675
+0.30488 0.490287 0.262454 -0.773165
+0.272161 0.509178 0.31246 -0.754344
+0.238276 0.525887 0.361127 -0.732294
+0.203372 0.540346 0.408248 -0.707107
+0.167596 0.55249 0.453621 -0.678892
+0.131103 0.562268 0.497052 -0.64777
+0.0940477 0.569639 0.538354 -0.613875
+0.0565902 0.57457 0.57735 -0.57735
+0.0188903 0.577041 0.613875 -0.538354
+-0.0188904 0.577041 0.64777 -0.497052
+-0.0565903 0.57457 0.678892 -0.453621
+-0.0940478 0.569639 0.707107 -0.408248
+-0.131103 0.562268 0.732294 -0.361127
+-0.167596 0.55249 0.754345 -0.31246
+-0.203372 0.540346 0.773165 -0.262454
+-0.238277 0.525887 0.788675 -0.211325
+-0.272161 0.509178 0.800808 -0.159291
+-0.30488 0.490287 0.809511 -0.106574
+-0.336294 0.469297 0.814748 -0.0534014
+-0.366267 0.446298 0.816497 -2.77792e-08
+-0.394672 0.421387 0.814748 0.0534015
+-0.421387 0.394672 0.809511 0.106574
+-0.446298 0.366267 0.800808 0.159291
+-0.469297 0.336294 0.788675 0.211325
+-0.490287 0.30488 0.773165 0.262454
+-0.509177 0.272161 0.754345 0.31246
+-0.525887 0.238277 0.732294 0.361127
+-0.540346 0.203372 0.707107 0.408248
+-0.55249 0.167596 0.678892 0.453621
+-0.562268 0.131103 0.64777 0.497052
+-0.569639 0.0940478 0.613875 0.538354
+-0.57457 0.0565902 0.57735 0.57735
+-0.577041 0.0188904 0.538354 0.613875
+0.612045 0.0200363 -0.417474 -0.671353
+0.609424 0.060023 -0.372672 -0.69722
+0.604193 0.0997527 -0.326274 -0.720101
+0.596375 0.139055 -0.278478 -0.739899
+0.586004 0.177762 -0.22949 -0.756528
+0.573123 0.215708 -0.17952 -0.769917
+0.557788 0.25273 -0.12878 -0.78001
+0.540064 0.28867 -0.0774893 -0.786763
+0.520028 0.323374 -0.0258667 -0.790146
+0.497765 0.356693 0.0258667 -0.790146
+0.47337 0.388485 0.0774893 -0.786763
+0.446949 0.418613 0.12878 -0.78001
+0.418613 0.446949 0.17952 -0.769917
+0.388485 0.47337 0.22949 -0.756528
+0.356693 0.497765 0.278478 -0.739899
+0.323374 0.520028 0.326274 -0.720101
+0.28867 0.540064 0.372672 -0.69722
+0.25273 0.557788 0.417474 -0.671353
+0.215708 0.573123 0.460489 -0.642612
+0.177762 0.586004 0.501532 -0.611118
+0.139055 0.596375 0.540427 -0.577008
+0.0997526 0.604193 0.577008 -0.540427
+0.0600229 0.609424 0.611118 -0.501532
+0.0200362 0.612045 0.642612 -0.460489
+-0.0200363 0.612045 0.671353 -0.417474
+-0.060023 0.609424 0.69722 -0.372672
+-0.0997527 0.604193 0.720101 -0.326274
+-0.139055 0.596375 0.739899 -0.278478
+-0.177762 0.586004 0.756528 -0.22949
+-0.215708 0.573123 0.769917 -0.179519
+-0.25273 0.557788 0.78001 -0.12878
+-0.28867 0.540064 0.786763 -0.0774894
+-0.323374 0.520028 0.790146 -0.0258667
+-0.356693 0.497765 0.790146 0.0258668
+-0.388485 0.47337 0.786763 0.0774893
+-0.418613 0.446949 0.78001 0.12878
+-0.446949 0.418613 0.769917 0.17952
+-0.47337 0.388485 0.756528 0.22949
+-0.497765 0.356693 0.739899 0.278478
+-0.520028 0.323374 0.720101 0.326274
+-0.540064 0.28867 0.69722 0.372672
+-0.557788 0.25273 0.671353 0.417474
+-0.573123 0.215708 0.642612 0.460489
+-0.586004 0.177762 0.611118 0.501532
+-0.596375 0.139055 0.577008 0.540427
+-0.604193 0.0997527 0.540427 0.577008
+-0.609424 0.060023 0.501532 0.611118
+-0.612045 0.0200363 0.460489 0.642612
+0.539773 0.0176703 -0.684112 -0.490228
+0.537461 0.0529353 -0.650585 -0.533922
+0.532848 0.0879736 -0.614272 -0.575329
+0.525954 0.122635 -0.575329 -0.614272
+0.516807 0.156772 -0.533922 -0.650585
+0.505447 0.190237 -0.490228 -0.684112
+0.491923 0.222887 -0.444435 -0.71471
+0.476292 0.254583 -0.396739 -0.742247
+0.458622 0.285189 -0.347345 -0.766606
+0.438987 0.314574 -0.296463 -0.787682
+0.417473 0.342612 -0.244311 -0.805385
+0.394172 0.369182 -0.191113 -0.81964
+0.369182 0.394172 -0.137097 -0.830384
+0.342612 0.417473 -0.0824937 -0.837573
+0.314574 0.438987 -0.0275372 -0.841175
+0.285189 0.458622 0.0275373 -0.841175
+0.254583 0.476292 0.0824938 -0.837573
+0.222887 0.491923 0.137097 -0.830384
+0.190237 0.505447 0.191113 -0.81964
+0.156772 0.516807 0.244311 -0.805385
+0.122635 0.525954 0.296463 -0.787682
+0.0879735 0.532848 0.347345 -0.766606
+0.0529352 0.537461 0.39674 -0.742247
+0.0176703 0.539773 0.444435 -0.71471
+-0.0176704 0.539773 0.490228 -0.684112
+-0.0529354 0.537461 0.533922 -0.650585
+-0.0879736 0.532848 0.575329 -0.614272
+-0.122635 0.525954 0.614272 -0.575329
+-0.156772 0.516807 0.650585 -0.533921
+-0.190237 0.505447 0.684112 -0.490228
+-0.222887 0.491923 0.71471 -0.444435
+-0.254583 0.476292 0.742247 -0.39674
+-0.285189 0.458622 0.766606 -0.347345
+-0.314574 0.438987 0.787682 -0.296463
+-0.342612 0.417473 0.805385 -0.244311
+-0.369182 0.394172 0.81964 -0.191113
+-0.394172 0.369182 0.830384 -0.137097
+-0.417473 0.342612 0.837573 -0.0824937
+-0.438987 0.314574 0.841175 -0.0275373
+-0.458622 0.285189 0.841175 0.0275372
+-0.476292 0.254583 0.837573 0.0824936
+-0.491923 0.222887 0.830384 0.137097
+-0.505447 0.190237 0.81964 0.191113
+-0.516807 0.156772 0.805385 0.244311
+-0.525954 0.122635 0.787682 0.296463
+-0.532848 0.0879736 0.766606 0.347345
+-0.537461 0.0529353 0.742247 0.39674
+-0.539773 0.0176704 0.71471 0.444435
+0.577041 0.0188904 -0.613875 -0.538354
+0.57457 0.0565902 -0.57735 -0.57735
+0.569639 0.0940477 -0.538354 -0.613875
+0.562268 0.131103 -0.497052 -0.64777
+0.55249 0.167596 -0.453621 -0.678892
+0.540346 0.203372 -0.408248 -0.707107
+0.525887 0.238277 -0.361127 -0.732294
+0.509177 0.272161 -0.31246 -0.754344
+0.490287 0.30488 -0.262454 -0.773165
+0.469297 0.336294 -0.211325 -0.788675
+0.446298 0.366267 -0.159291 -0.800808
+0.421387 0.394672 -0.106574 -0.809511
+0.394672 0.421387 -0.0534014 -0.814748
+0.366267 0.446298 1.82562e-09 -0.816497
+0.336294 0.469297 0.0534015 -0.814748
+0.30488 0.490287 0.106574 -0.809511
+0.272161 0.509178 0.159291 -0.800808
+0.238276 0.525887 0.211325 -0.788675
+0.203372 0.540346 0.262454 -0.773165
+0.167596 0.55249 0.31246 -0.754344
+0.131103 0.562268 0.361127 -0.732294
+0.0940477 0.569639 0.408248 -0.707107
+0.0565902 0.57457 0.453621 -0.678892
+0.0188903 0.577041 0.497052 -0.64777
+-0.0188904 0.577041 0.538354 -0.613875
+-0.0565903 0.57457 0.57735 -0.57735
+-0.0940478 0.569639 0.613875 -0.538354
+-0.131103 0.562268 0.64777 -0.497052
+-0.167596 0.55249 0.678892 -0.453621
+-0.203372 0.540346 0.707107 -0.408248
+-0.238277 0.525887 0.732294 -0.361127
+-0.272161 0.509178 0.754344 -0.31246
+-0.30488 0.490287 0.773165 -0.262454
+-0.336294 0.469297 0.788675 -0.211325
+-0.366267 0.446298 0.800808 -0.159291
+-0.394672 0.421387 0.809511 -0.106574
+-0.421387 0.394672 0.814748 -0.0534015
+-0.446298 0.366267 0.816497 3.75158e-08
+-0.469297 0.336294 0.814748 0.0534014
+-0.490287 0.30488 0.809511 0.106574
+-0.509177 0.272161 0.800808 0.15929
+-0.525887 0.238277 0.788675 0.211325
+-0.540346 0.203372 0.773165 0.262454
+-0.55249 0.167596 0.754344 0.31246
+-0.562268 0.131103 0.732294 0.361127
+-0.569639 0.0940478 0.707107 0.408248
+-0.57457 0.0565902 0.678892 0.453621
+-0.577041 0.0188904 0.64777 0.497052
+0.577041 0.0188904 -0.707107 -0.408248
+0.57457 0.0565902 -0.678892 -0.453621
+0.569639 0.0940477 -0.64777 -0.497052
+0.562268 0.131103 -0.613875 -0.538354
+0.55249 0.167596 -0.57735 -0.57735
+0.540346 0.203372 -0.538354 -0.613875
+0.525887 0.238277 -0.497052 -0.64777
+0.509177 0.272161 -0.453621 -0.678892
+0.490287 0.30488 -0.408248 -0.707107
+0.469297 0.336294 -0.361127 -0.732294
+0.446298 0.366267 -0.31246 -0.754344
+0.421387 0.394672 -0.262454 -0.773165
+0.394672 0.421387 -0.211325 -0.788675
+0.366267 0.446298 -0.159291 -0.800808
+0.336294 0.469297 -0.106574 -0.809511
+0.30488 0.490287 -0.0534014 -0.814748
+0.272161 0.509178 6.71206e-08 -0.816497
+0.238276 0.525887 0.0534015 -0.814748
+0.203372 0.540346 0.106574 -0.809511
+0.167596 0.55249 0.159291 -0.800808
+0.131103 0.562268 0.211325 -0.788675
+0.0940477 0.569639 0.262454 -0.773165
+0.0565902 0.57457 0.31246 -0.754344
+0.0188903 0.577041 0.361127 -0.732293
+-0.0188904 0.577041 0.408248 -0.707107
+-0.0565903 0.57457 0.453621 -0.678892
+-0.0940478 0.569639 0.497052 -0.64777
+-0.131103 0.562268 0.538354 -0.613875
+-0.167596 0.55249 0.57735 -0.57735
+-0.203372 0.540346 0.613875 -0.538354
+-0.238277 0.525887 0.64777 -0.497052
+-0.272161 0.509178 0.678892 -0.453621
+-0.30488 0.490287 0.707107 -0.408248
+-0.336294 0.469297 0.732294 -0.361127
+-0.366267 0.446298 0.754344 -0.31246
+-0.394672 0.421387 0.773165 -0.262454
+-0.421387 0.394672 0.788675 -0.211325
+-0.446298 0.366267 0.800808 -0.159291
+-0.469297 0.336294 0.809511 -0.106574
+-0.490287 0.30488 0.814748 -0.0534014
+-0.509177 0.272161 0.816497 -9.18571e-08
+-0.525887 0.238277 0.814748 0.0534014
+-0.540346 0.203372 0.809511 0.106574
+-0.55249 0.167596 0.800808 0.159291
+-0.562268 0.131103 0.788675 0.211325
+-0.569639 0.0940478 0.773165 0.262454
+-0.57457 0.0565902 0.754344 0.31246
+-0.577041 0.0188904 0.732294 0.361127
+0.612045 0.0200363 -0.642612 -0.460489
+0.609424 0.060023 -0.611118 -0.501532
+0.604193 0.0997527 -0.577008 -0.540427
+0.596375 0.139055 -0.540427 -0.577008
+0.586004 0.177762 -0.501532 -0.611118
+0.573123 0.215708 -0.460489 -0.642612
+0.557788 0.25273 -0.417474 -0.671353
+0.540064 0.28867 -0.372672 -0.69722
+0.520028 0.323374 -0.326274 -0.720101
+0.497765 0.356693 -0.278478 -0.739899
+0.47337 0.388485 -0.22949 -0.756528
+0.446949 0.418613 -0.17952 -0.769917
+0.418613 0.446949 -0.12878 -0.78001
+0.388485 0.47337 -0.0774894 -0.786763
+0.356693 0.497765 -0.0258667 -0.790146
+0.323374 0.520028 0.0258668 -0.790146
+0.28867 0.540064 0.0774894 -0.786763
+0.25273 0.557788 0.12878 -0.78001
+0.215708 0.573123 0.17952 -0.769917
+0.177762 0.586004 0.22949 -0.756528
+0.139055 0.596375 0.278478 -0.739899
+0.0997526 0.604193 0.326274 -0.720101
+0.0600229 0.609424 0.372672 -0.69722
+0.0200362 0.612045 0.417474 -0.671353
+-0.0200363 0.612045 0.460489 -0.642612
+-0.060023 0.609424 0.501532 -0.611118
+-0.0997527 0.604193 0.540427 -0.577008
+-0.139055 0.596375 0.577008 -0.540427
+-0.177762 0.586004 0.611119 -0.501532
+-0.215708 0.573123 0.642612 -0.460489
+-0.25273 0.557788 0.671353 -0.417474
+-0.28867 0.540064 0.69722 -0.372672
+-0.323374 0.520028 0.720101 -0.326274
+-0.356693 0.497765 0.739899 -0.278478
+-0.388485 0.47337 0.756528 -0.22949
+-0.418613 0.446949 0.769917 -0.179519
+-0.446949 0.418613 0.78001 -0.12878
+-0.47337 0.388485 0.786763 -0.0774893
+-0.497765 0.356693 0.790146 -0.0258668
+-0.520028 0.323374 0.790146 0.0258667
+-0.540064 0.28867 0.786763 0.0774893
+-0.557788 0.25273 0.78001 0.12878
+-0.573123 0.215708 0.769917 0.17952
+-0.586004 0.177762 0.756528 0.22949
+-0.596375 0.139055 0.739899 0.278478
+-0.604193 0.0997527 0.720101 0.326274
+-0.609424 0.060023 0.69722 0.372672
+-0.612045 0.0200363 0.671353 0.417474
+0.612045 0.0200363 -0.540427 -0.577008
+0.609424 0.060023 -0.501532 -0.611118
+0.604193 0.0997527 -0.460489 -0.642612
+0.596375 0.139055 -0.417474 -0.671353
+0.586004 0.177762 -0.372672 -0.69722
+0.573123 0.215708 -0.326274 -0.720101
+0.557788 0.25273 -0.278478 -0.739899
+0.540064 0.28867 -0.22949 -0.756528
+0.520028 0.323374 -0.17952 -0.769917
+0.497765 0.356693 -0.12878 -0.78001
+0.47337 0.388485 -0.0774894 -0.786763
+0.446949 0.418613 -0.0258667 -0.790146
+0.418613 0.446949 0.0258667 -0.790146
+0.388485 0.47337 0.0774894 -0.786763
+0.356693 0.497765 0.12878 -0.78001
+0.323374 0.520028 0.17952 -0.769917
+0.28867 0.540064 0.22949 -0.756528
+0.25273 0.557788 0.278478 -0.739899
+0.215708 0.573123 0.326274 -0.720101
+0.177762 0.586004 0.372672 -0.69722
+0.139055 0.596375 0.417474 -0.671353
+0.0997526 0.604193 0.460489 -0.642612
+0.0600229 0.609424 0.501532 -0.611118
+0.0200362 0.612045 0.540427 -0.577008
+-0.0200363 0.612045 0.577008 -0.540427
+-0.060023 0.609424 0.611118 -0.501532
+-0.0997527 0.604193 0.642612 -0.460489
+-0.139055 0.596375 0.671353 -0.417474
+-0.177762 0.586004 0.69722 -0.372672
+-0.215708 0.573123 0.720101 -0.326273
+-0.25273 0.557788 0.739899 -0.278478
+-0.28867 0.540064 0.756528 -0.22949
+-0.323374 0.520028 0.769917 -0.17952
+-0.356693 0.497765 0.78001 -0.12878
+-0.388485 0.47337 0.786763 -0.0774894
+-0.418613 0.446949 0.790146 -0.0258666
+-0.446949 0.418613 0.790146 0.0258667
+-0.47337 0.388485 0.786763 0.0774894
+-0.497765 0.356693 0.78001 0.12878
+-0.520028 0.323374 0.769917 0.17952
+-0.540064 0.28867 0.756528 0.22949
+-0.557788 0.25273 0.739899 0.278478
+-0.573123 0.215708 0.720101 0.326274
+-0.586004 0.177762 0.69722 0.372672
+-0.596375 0.139055 0.671353 0.417474
+-0.604193 0.0997527 0.642612 0.460489
+-0.609424 0.060023 0.611118 0.501532
+-0.612045 0.0200363 0.577008 0.540427
+0.645152 0.0211201 -0.464949 -0.605934
+0.642389 0.0632698 -0.424324 -0.635045
+0.636876 0.105149 -0.381881 -0.661438
+0.628635 0.146577 -0.337804 -0.684998
+0.617702 0.187378 -0.292279 -0.705625
+0.604125 0.227376 -0.245503 -0.72323
+0.58796 0.266401 -0.197676 -0.737738
+0.569278 0.304285 -0.149003 -0.749087
+0.548158 0.340866 -0.099691 -0.757229
+0.52469 0.375988 -0.0499525 -0.762127
+0.498976 0.409499 -1.38464e-08 -0.763763
+0.471125 0.441257 0.0499525 -0.762127
+0.441257 0.471125 0.099691 -0.757229
+0.409499 0.498976 0.149003 -0.749087
+0.375988 0.52469 0.197676 -0.737738
+0.340866 0.548158 0.245504 -0.72323
+0.304285 0.569278 0.292279 -0.705625
+0.266401 0.58796 0.337804 -0.684998
+0.227376 0.604125 0.381881 -0.661438
+0.187378 0.617702 0.424324 -0.635045
+0.146577 0.628635 0.464949 -0.605934
+0.105148 0.636876 0.503584 -0.574227
+0.0632697 0.642389 0.540062 -0.540062
+0.02112 0.645152 0.574227 -0.503584
+-0.0211201 0.645152 0.605934 -0.464949
+-0.0632698 0.642389 0.635045 -0.424324
+-0.105149 0.636876 0.661438 -0.381881
+-0.146577 0.628635 0.684998 -0.337804
+-0.187378 0.617702 0.705625 -0.292279
+-0.227377 0.604125 0.72323 -0.245503
+-0.266401 0.58796 0.737738 -0.197676
+-0.304285 0.569278 0.749087 -0.149003
+-0.340866 0.548158 0.757229 -0.099691
+-0.375988 0.52469 0.762127 -0.0499524
+-0.409499 0.498976 0.763763 -2.59851e-08
+-0.441257 0.471125 0.762127 0.0499525
+-0.471125 0.441257 0.757229 0.099691
+-0.498976 0.409499 0.749087 0.149003
+-0.52469 0.375988 0.737738 0.197676
+-0.548158 0.340866 0.72323 0.245503
+-0.569278 0.304285 0.705625 0.292279
+-0.58796 0.266401 0.684998 0.337804
+-0.604125 0.227376 0.661438 0.381881
+-0.617702 0.187378 0.635045 0.424324
+-0.628635 0.146577 0.605934 0.464949
+-0.636876 0.105149 0.574227 0.503584
+-0.642389 0.0632698 0.540062 0.540062
+-0.645152 0.0211201 0.503584 0.574227
+0.645152 0.0211201 -0.574227 -0.503584
+0.642389 0.0632698 -0.540062 -0.540062
+0.636876 0.105149 -0.503584 -0.574227
+0.628635 0.146577 -0.464949 -0.605934
+0.617702 0.187378 -0.424324 -0.635045
+0.604125 0.227376 -0.381881 -0.661438
+0.58796 0.266401 -0.337804 -0.684998
+0.569278 0.304285 -0.292279 -0.705625
+0.548158 0.340866 -0.245503 -0.72323
+0.52469 0.375988 -0.197676 -0.737738
+0.498976 0.409499 -0.149003 -0.749087
+0.471125 0.441257 -0.099691 -0.757229
+0.441257 0.471125 -0.0499525 -0.762127
+0.409499 0.498976 1.70771e-09 -0.763763
+0.375988 0.52469 0.0499525 -0.762127
+0.340866 0.548158 0.0996911 -0.757229
+0.304285 0.569278 0.149003 -0.749087
+0.266401 0.58796 0.197676 -0.737738
+0.227376 0.604125 0.245503 -0.72323
+0.187378 0.617702 0.292279 -0.705625
+0.146577 0.628635 0.337804 -0.684998
+0.105148 0.636876 0.381881 -0.661438
+0.0632697 0.642389 0.424324 -0.635045
+0.02112 0.645152 0.464949 -0.605934
+-0.0211201 0.645152 0.503584 -0.574227
+-0.0632698 0.642389 0.540062 -0.540062
+-0.105149 0.636876 0.574227 -0.503584
+-0.146577 0.628635 0.605934 -0.464949
+-0.187378 0.617702 0.635045 -0.424324
+-0.227377 0.604125 0.661438 -0.381881
+-0.266401 0.58796 0.684998 -0.337803
+-0.304285 0.569278 0.705625 -0.292279
+-0.340866 0.548158 0.72323 -0.245503
+-0.375988 0.52469 0.737738 -0.197676
+-0.409499 0.498976 0.749087 -0.149003
+-0.441257 0.471125 0.757229 -0.099691
+-0.471125 0.441257 0.762127 -0.0499525
+-0.498976 0.409499 0.763763 3.50928e-08
+-0.52469 0.375988 0.762127 0.0499524
+-0.548158 0.340866 0.757229 0.099691
+-0.569278 0.304285 0.749087 0.149003
+-0.58796 0.266401 0.737738 0.197676
+-0.604125 0.227376 0.72323 0.245504
+-0.617702 0.187378 0.705625 0.292279
+-0.628635 0.146577 0.684998 0.337804
+-0.636876 0.105149 0.661438 0.381881
+-0.642389 0.0632698 0.635045 0.424324
+-0.645152 0.0211201 0.605934 0.464949
+0.676641 0.0221509 -0.50311 -0.537165
+0.673743 0.0663579 -0.466901 -0.56892
+0.667961 0.110281 -0.428692 -0.598239
+0.659318 0.153731 -0.388647 -0.624996
+0.647852 0.196524 -0.346939 -0.649076
+0.633611 0.238474 -0.303744 -0.670378
+0.616658 0.279404 -0.259249 -0.688808
+0.597064 0.319137 -0.213644 -0.704289
+0.574913 0.357504 -0.167124 -0.716754
+0.5503 0.394339 -0.119888 -0.72615
+0.523331 0.429486 -0.0721387 -0.732436
+0.49412 0.462794 -0.0240806 -0.735586
+0.462794 0.49412 0.0240806 -0.735586
+0.429486 0.523331 0.0721387 -0.732436
+0.394339 0.5503 0.119888 -0.72615
+0.357504 0.574913 0.167124 -0.716754
+0.319137 0.597064 0.213644 -0.704289
+0.279404 0.616658 0.259249 -0.688808
+0.238474 0.633611 0.303744 -0.670378
+0.196524 0.647852 0.346939 -0.649076
+0.153731 0.659318 0.388647 -0.624996
+0.110281 0.667961 0.428692 -0.598239
+0.0663578 0.673743 0.466901 -0.56892
+0.0221508 0.676641 0.50311 -0.537165
+-0.022151 0.676641 0.537165 -0.50311
+-0.066358 0.673743 0.56892 -0.466901
+-0.110281 0.667961 0.598239 -0.428692
+-0.153731 0.659318 0.624996 -0.388647
+-0.196524 0.647852 0.649077 -0.346938
+-0.238475 0.633611 0.670378 -0.303744
+-0.279404 0.616658 0.688808 -0.259249
+-0.319137 0.597064 0.704289 -0.213644
+-0.357504 0.574913 0.716754 -0.167124
+-0.394339 0.5503 0.72615 -0.119888
+-0.429486 0.523331 0.732436 -0.0721387
+-0.462794 0.49412 0.735586 -0.0240805
+-0.49412 0.462794 0.735586 0.0240805
+-0.523331 0.429486 0.732436 0.0721387
+-0.5503 0.394339 0.72615 0.119888
+-0.574913 0.357504 0.716754 0.167124
+-0.597063 0.319137 0.704289 0.213644
+-0.616658 0.279404 0.688808 0.259249
+-0.633611 0.238474 0.670378 0.303744
+-0.647852 0.196524 0.649076 0.346939
+-0.659318 0.153731 0.624996 0.388647
+-0.667961 0.110281 0.598239 0.428692
+-0.673743 0.0663579 0.56892 0.466901
+-0.676641 0.022151 0.537165 0.50311
+0.0510037 0.00166969 0.728913 -0.682702
+0.0507853 0.00500192 0.772003 -0.633567
+0.0503494 0.00831272 0.811788 -0.581719
+0.049698 0.0115879 0.848096 -0.52738
+0.0488337 0.0148135 0.880772 -0.470783
+0.0477602 0.0179757 0.909677 -0.412169
+0.0464823 0.0210609 0.934687 -0.351791
+0.0450054 0.0240559 0.955694 -0.289906
+0.0433357 0.0269479 0.972608 -0.22678
+0.0414804 0.0297244 0.985358 -0.162683
+0.0394475 0.0323737 0.993888 -0.0978894
+0.0372457 0.0348844 0.998162 -0.0326764
+0.0348844 0.0372457 0.998162 0.0326765
+0.0323737 0.0394475 0.993888 0.0978894
+0.0297244 0.0414804 0.985358 0.162683
+0.0269478 0.0433357 0.972608 0.22678
+0.0240559 0.0450054 0.955693 0.289907
+0.0210609 0.0464823 0.934686 0.351791
+0.0179757 0.0477603 0.909677 0.412169
+0.0148135 0.0488337 0.880772 0.470783
+0.0115879 0.049698 0.848096 0.52738
+0.00831272 0.0503494 0.811788 0.581719
+0.00500191 0.0507853 0.772003 0.633567
+0.00166968 0.0510037 0.728913 0.682702
+-0.00166969 0.0510037 0.682702 0.728913
+-0.00500192 0.0507853 0.633567 0.772003
+-0.00831273 0.0503494 0.581719 0.811788
+-0.0115879 0.049698 0.52738 0.848096
+-0.0148135 0.0488337 0.470782 0.880772
+-0.0179757 0.0477602 0.412169 0.909677
+-0.0210609 0.0464823 0.351791 0.934687
+-0.0240559 0.0450054 0.289907 0.955693
+-0.0269478 0.0433357 0.22678 0.972608
+-0.0297244 0.0414804 0.162683 0.985358
+-0.0323737 0.0394475 0.0978895 0.993888
+-0.0348844 0.0372457 0.0326764 0.998162
+-0.0372457 0.0348844 -0.0326764 0.998162
+-0.0394475 0.0323737 -0.0978895 0.993888
+-0.0414804 0.0297244 -0.162683 0.985358
+-0.0433357 0.0269478 -0.22678 0.972608
+-0.0450054 0.0240559 -0.289906 0.955694
+-0.0464823 0.0210609 -0.351791 0.934687
+-0.0477603 0.0179757 -0.412169 0.909677
+-0.0488337 0.0148135 -0.470783 0.880772
+-0.049698 0.0115879 -0.52738 0.848096
+-0.0503494 0.00831273 -0.581719 0.811788
+-0.0507853 0.00500191 -0.633567 0.772003
+-0.0510037 0.00166969 -0.682702 0.728913
+0.102007 0.00333938 0.931019 -0.350411
+0.101571 0.0100038 0.951943 -0.288769
+0.100699 0.0166254 0.968791 -0.22589
+0.0993959 0.0231759 0.981491 -0.162045
+0.0976673 0.0296271 0.989988 -0.0975053
+0.0955205 0.0359514 0.994245 -0.0325482
+0.0929646 0.0421217 0.994245 0.0325482
+0.0900107 0.0481117 0.989988 0.0975053
+0.0866713 0.0538957 0.981491 0.162045
+0.0829608 0.0594489 0.968791 0.22589
+0.0788951 0.0647475 0.951943 0.288769
+0.0744914 0.0697688 0.931019 0.350411
+0.0697688 0.0744914 0.906107 0.410552
+0.0647475 0.078895 0.877316 0.468935
+0.0594489 0.0829608 0.844768 0.52531
+0.0538957 0.0866713 0.808602 0.579436
+0.0481117 0.0900107 0.768974 0.631081
+0.0421217 0.0929647 0.726053 0.680023
+0.0359514 0.0955205 0.680023 0.726053
+0.0296271 0.0976673 0.63108 0.768974
+0.0231759 0.0993959 0.579436 0.808602
+0.0166254 0.100699 0.52531 0.844768
+0.0100038 0.101571 0.468935 0.877316
+0.00333937 0.102007 0.410552 0.906107
+-0.00333939 0.102007 0.350411 0.931019
+-0.0100038 0.101571 0.288769 0.951943
+-0.0166255 0.100699 0.22589 0.968791
+-0.0231759 0.0993959 0.162045 0.981491
+-0.0296271 0.0976673 0.0975051 0.989988
+-0.0359514 0.0955205 0.0325481 0.994245
+-0.0421217 0.0929646 -0.0325484 0.994245
+-0.0481117 0.0900107 -0.0975052 0.989988
+-0.0538957 0.0866713 -0.162045 0.981491
+-0.0594489 0.0829608 -0.225891 0.968791
+-0.0647475 0.0788951 -0.288769 0.951943
+-0.0697689 0.0744914 -0.350411 0.931019
+-0.0744914 0.0697689 -0.410552 0.906107
+-0.0788951 0.0647475 -0.468935 0.877316
+-0.0829608 0.0594489 -0.52531 0.844768
+-0.0866713 0.0538957 -0.579436 0.808602
+-0.0900107 0.0481117 -0.63108 0.768974
+-0.0929646 0.0421217 -0.680023 0.726053
+-0.0955205 0.0359514 -0.726053 0.680023
+-0.0976673 0.0296271 -0.768974 0.631081
+-0.0993959 0.0231759 -0.808602 0.579436
+-0.100699 0.0166255 -0.844768 0.52531
+-0.101571 0.0100038 -0.877316 0.468935
+-0.102007 0.00333939 -0.906107 0.410552
+0.102007 0.00333938 0.410552 -0.906107
+0.101571 0.0100038 0.468935 -0.877316
+0.100699 0.0166254 0.52531 -0.844768
+0.0993959 0.0231759 0.579436 -0.808602
+0.0976673 0.0296271 0.631081 -0.768974
+0.0955205 0.0359514 0.680023 -0.726053
+0.0929646 0.0421217 0.726053 -0.680023
+0.0900107 0.0481117 0.768974 -0.63108
+0.0866713 0.0538957 0.808602 -0.579436
+0.0829608 0.0594489 0.844768 -0.52531
+0.0788951 0.0647475 0.877316 -0.468935
+0.0744914 0.0697688 0.906107 -0.410552
+0.0697688 0.0744914 0.931019 -0.350411
+0.0647475 0.078895 0.951943 -0.288769
+0.0594489 0.0829608 0.968791 -0.22589
+0.0538957 0.0866713 0.981491 -0.162045
+0.0481117 0.0900107 0.989988 -0.0975052
+0.0421217 0.0929647 0.994245 -0.0325482
+0.0359514 0.0955205 0.994245 0.0325483
+0.0296271 0.0976673 0.989988 0.0975053
+0.0231759 0.0993959 0.981491 0.162045
+0.0166254 0.100699 0.968791 0.225891
+0.0100038 0.101571 0.951943 0.288769
+0.00333937 0.102007 0.931019 0.350411
+-0.00333939 0.102007 0.906107 0.410552
+-0.0100038 0.101571 0.877316 0.468935
+-0.0166255 0.100699 0.844768 0.52531
+-0.0231759 0.0993959 0.808602 0.579436
+-0.0296271 0.0976673 0.768974 0.631081
+-0.0359514 0.0955205 0.726053 0.680023
+-0.0421217 0.0929646 0.680023 0.726053
+-0.0481117 0.0900107 0.631081 0.768974
+-0.0538957 0.0866713 0.579436 0.808602
+-0.0594489 0.0829608 0.52531 0.844768
+-0.0647475 0.0788951 0.468935 0.877316
+-0.0697689 0.0744914 0.410552 0.906107
+-0.0744914 0.0697689 0.350411 0.931019
+-0.0788951 0.0647475 0.288769 0.951943
+-0.0829608 0.0594489 0.225891 0.968791
+-0.0866713 0.0538957 0.162045 0.981491
+-0.0900107 0.0481117 0.0975054 0.989988
+-0.0929646 0.0421217 0.0325482 0.994245
+-0.0955205 0.0359514 -0.0325483 0.994245
+-0.0976673 0.0296271 -0.0975053 0.989988
+-0.0993959 0.0231759 -0.162045 0.981491
+-0.100699 0.0166255 -0.22589 0.968791
+-0.101571 0.0100038 -0.288769 0.951943
+-0.102007 0.00333939 -0.350411 0.931019
+0.153011 0.00500907 0.72126 -0.675534
+0.152356 0.0150057 0.763898 -0.626915
+0.151048 0.0249382 0.803265 -0.575611
+0.149094 0.0347638 0.839192 -0.521843
+0.146501 0.0444406 0.871525 -0.46584
+0.143281 0.0539271 0.900126 -0.407842
+0.139447 0.0631826 0.924873 -0.348098
+0.135016 0.0721676 0.94566 -0.286863
+0.130007 0.0808436 0.962397 -0.224399
+0.124441 0.0891733 0.975013 -0.160975
+0.118343 0.0971212 0.983453 -0.0968617
+0.111737 0.104653 0.987683 -0.0323334
+0.104653 0.111737 0.987683 0.0323334
+0.0971212 0.118343 0.983453 0.0968617
+0.0891733 0.124441 0.975013 0.160975
+0.0808435 0.130007 0.962397 0.2244
+0.0721676 0.135016 0.94566 0.286863
+0.0631826 0.139447 0.924873 0.348098
+0.053927 0.143281 0.900126 0.407842
+0.0444406 0.146501 0.871525 0.46584
+0.0347638 0.149094 0.839192 0.521843
+0.0249382 0.151048 0.803265 0.575611
+0.0150057 0.152356 0.763898 0.626915
+0.00500905 0.153011 0.72126 0.675534
+-0.00500908 0.153011 0.675534 0.72126
+-0.0150058 0.152356 0.626915 0.763898
+-0.0249382 0.151048 0.575611 0.803265
+-0.0347638 0.149094 0.521843 0.839192
+-0.0444406 0.146501 0.46584 0.871525
+-0.0539271 0.143281 0.407842 0.900126
+-0.0631826 0.139447 0.348098 0.924873
+-0.0721676 0.135016 0.286863 0.94566
+-0.0808435 0.130007 0.224399 0.962397
+-0.0891733 0.124441 0.160975 0.975013
+-0.0971212 0.118343 0.0968617 0.983453
+-0.104653 0.111737 0.0323333 0.987683
+-0.111737 0.104653 -0.0323333 0.987683
+-0.118343 0.0971212 -0.0968617 0.983453
+-0.124441 0.0891733 -0.160975 0.975013
+-0.130007 0.0808435 -0.224399 0.962397
+-0.135016 0.0721676 -0.286863 0.94566
+-0.139447 0.0631826 -0.348098 0.924873
+-0.143281 0.053927 -0.407842 0.900126
+-0.146501 0.0444406 -0.46584 0.871525
+-0.149094 0.0347638 -0.521843 0.839192
+-0.151048 0.0249382 -0.575611 0.803265
+-0.152356 0.0150057 -0.626915 0.763898
+-0.153011 0.00500908 -0.675534 0.72126
+0.153011 0.00500907 0.962397 -0.224399
+0.152356 0.0150057 0.975013 -0.160975
+0.151048 0.0249382 0.983453 -0.0968617
+0.149094 0.0347638 0.987683 -0.0323334
+0.146501 0.0444406 0.987683 0.0323334
+0.143281 0.0539271 0.983453 0.0968617
+0.139447 0.0631826 0.975013 0.160975
+0.135016 0.0721676 0.962397 0.224399
+0.130007 0.0808436 0.94566 0.286863
+0.124441 0.0891733 0.924873 0.348098
+0.118343 0.0971212 0.900126 0.407842
+0.111737 0.104653 0.871525 0.46584
+0.104653 0.111737 0.839192 0.521843
+0.0971212 0.118343 0.803265 0.575611
+0.0891733 0.124441 0.763898 0.626915
+0.0808435 0.130007 0.72126 0.675534
+0.0721676 0.135016 0.675534 0.72126
+0.0631826 0.139447 0.626915 0.763898
+0.053927 0.143281 0.575611 0.803265
+0.0444406 0.146501 0.521843 0.839192
+0.0347638 0.149094 0.46584 0.871525
+0.0249382 0.151048 0.407842 0.900126
+0.0150057 0.152356 0.348098 0.924873
+0.00500905 0.153011 0.286863 0.94566
+-0.00500908 0.153011 0.224399 0.962397
+-0.0150058 0.152356 0.160975 0.975013
+-0.0249382 0.151048 0.0968616 0.983453
+-0.0347638 0.149094 0.0323333 0.987683
+-0.0444406 0.146501 -0.0323335 0.987683
+-0.0539271 0.143281 -0.0968618 0.983453
+-0.0631826 0.139447 -0.160975 0.975013
+-0.0721676 0.135016 -0.224399 0.962397
+-0.0808435 0.130007 -0.286863 0.94566
+-0.0891733 0.124441 -0.348098 0.924873
+-0.0971212 0.118343 -0.407842 0.900126
+-0.104653 0.111737 -0.46584 0.871525
+-0.111737 0.104653 -0.521843 0.839192
+-0.118343 0.0971212 -0.575611 0.803265
+-0.124441 0.0891733 -0.626915 0.763898
+-0.130007 0.0808435 -0.675534 0.72126
+-0.135016 0.0721676 -0.72126 0.675534
+-0.139447 0.0631826 -0.763898 0.626915
+-0.143281 0.053927 -0.803265 0.575611
+-0.146501 0.0444406 -0.839192 0.521843
+-0.149094 0.0347638 -0.871525 0.46584
+-0.151048 0.0249382 -0.900126 0.407842
+-0.152356 0.0150057 -0.924873 0.348098
+-0.153011 0.00500908 -0.94566 0.286863
+0.204015 0.00667875 0.96587 -0.159466
+0.203141 0.0200077 0.974231 -0.0959534
+0.201398 0.0332509 0.978421 -0.0320302
+0.198792 0.0463518 0.978421 0.0320302
+0.195335 0.0592541 0.974231 0.0959534
+0.191041 0.0719027 0.96587 0.159466
+0.185929 0.0842435 0.953372 0.222295
+0.180021 0.0962235 0.936792 0.284173
+0.173343 0.107791 0.9162 0.344833
+0.165922 0.118898 0.891686 0.404017
+0.15779 0.129495 0.863352 0.461471
+0.148983 0.139538 0.831322 0.516949
+0.139538 0.148983 0.795732 0.570214
+0.129495 0.15779 0.756735 0.621036
+0.118898 0.165922 0.714497 0.669199
+0.107791 0.173343 0.669199 0.714497
+0.0962234 0.180021 0.621036 0.756735
+0.0842435 0.185929 0.570214 0.795732
+0.0719027 0.191041 0.516949 0.831322
+0.0592541 0.195335 0.461471 0.863352
+0.0463517 0.198792 0.404017 0.891686
+0.0332509 0.201398 0.344833 0.9162
+0.0200076 0.203141 0.284173 0.936792
+0.00667873 0.204015 0.222295 0.953372
+-0.00667877 0.204015 0.159466 0.96587
+-0.0200077 0.203141 0.0959533 0.974231
+-0.0332509 0.201398 0.0320301 0.978421
+-0.0463518 0.198792 -0.0320302 0.978421
+-0.0592541 0.195335 -0.0959535 0.974231
+-0.0719028 0.191041 -0.159466 0.96587
+-0.0842435 0.185929 -0.222295 0.953372
+-0.0962234 0.180021 -0.284173 0.936792
+-0.107791 0.173343 -0.344833 0.9162
+-0.118898 0.165922 -0.404018 0.891686
+-0.129495 0.15779 -0.461471 0.863352
+-0.139538 0.148983 -0.516949 0.831322
+-0.148983 0.139538 -0.570214 0.795732
+-0.15779 0.129495 -0.621036 0.756735
+-0.165922 0.118898 -0.669199 0.714497
+-0.173343 0.107791 -0.714497 0.669199
+-0.180021 0.0962235 -0.756735 0.621036
+-0.185929 0.0842435 -0.795732 0.570214
+-0.191041 0.0719027 -0.831322 0.516949
+-0.195335 0.0592541 -0.863352 0.461472
+-0.198792 0.0463517 -0.891686 0.404017
+-0.201398 0.0332509 -0.9162 0.344833
+-0.203141 0.0200077 -0.936792 0.284173
+-0.204015 0.00667877 -0.953372 0.222295
+0.204015 0.00667875 0.831322 -0.516949
+0.203141 0.0200077 0.863352 -0.461471
+0.201398 0.0332509 0.891686 -0.404017
+0.198792 0.0463518 0.9162 -0.344833
+0.195335 0.0592541 0.936792 -0.284173
+0.191041 0.0719027 0.953372 -0.222295
+0.185929 0.0842435 0.96587 -0.159466
+0.180021 0.0962235 0.974231 -0.0959534
+0.173343 0.107791 0.978421 -0.0320302
+0.165922 0.118898 0.978421 0.0320302
+0.15779 0.129495 0.974231 0.0959534
+0.148983 0.139538 0.96587 0.159466
+0.139538 0.148983 0.953372 0.222295
+0.129495 0.15779 0.936792 0.284173
+0.118898 0.165922 0.9162 0.344833
+0.107791 0.173343 0.891686 0.404018
+0.0962234 0.180021 0.863352 0.461472
+0.0842435 0.185929 0.831322 0.516949
+0.0719027 0.191041 0.795732 0.570214
+0.0592541 0.195335 0.756735 0.621036
+0.0463517 0.198792 0.714497 0.669199
+0.0332509 0.201398 0.669199 0.714497
+0.0200076 0.203141 0.621036 0.756735
+0.00667873 0.204015 0.570214 0.795732
+-0.00667877 0.204015 0.516949 0.831322
+-0.0200077 0.203141 0.461471 0.863352
+-0.0332509 0.201398 0.404017 0.891686
+-0.0463518 0.198792 0.344833 0.9162
+-0.0592541 0.195335 0.284173 0.936792
+-0.0719028 0.191041 0.222295 0.953372
+-0.0842435 0.185929 0.159466 0.96587
+-0.0962234 0.180021 0.0959535 0.974231
+-0.107791 0.173343 0.0320302 0.978421
+-0.118898 0.165922 -0.0320303 0.978421
+-0.129495 0.15779 -0.0959534 0.974231
+-0.139538 0.148983 -0.159466 0.96587
+-0.148983 0.139538 -0.222295 0.953372
+-0.15779 0.129495 -0.284173 0.936792
+-0.165922 0.118898 -0.344833 0.9162
+-0.173343 0.107791 -0.404018 0.891686
+-0.180021 0.0962235 -0.461471 0.863352
+-0.185929 0.0842435 -0.516949 0.831322
+-0.191041 0.0719027 -0.570214 0.795732
+-0.195335 0.0592541 -0.621036 0.756735
+-0.198792 0.0463517 -0.669199 0.714497
+-0.201398 0.0332509 -0.714497 0.669199
+-0.203141 0.0200077 -0.756735 0.621036
+-0.204015 0.00667877 -0.795732 0.570214
+0.255019 0.00834844 0.875416 -0.41054
+0.253927 0.0250096 0.900392 -0.352407
+0.251747 0.0415636 0.921513 -0.292764
+0.24849 0.0579397 0.938687 -0.231867
+0.244168 0.0740676 0.951842 -0.169977
+0.238801 0.0898784 0.960921 -0.10736
+0.232412 0.105304 0.965886 -0.0442829
+0.225027 0.120279 0.966714 0.0189838
+0.216678 0.134739 0.963402 0.0821693
+0.207402 0.148622 0.955965 0.145003
+0.197238 0.161869 0.944435 0.207216
+0.186229 0.174422 0.92886 0.268541
+0.174422 0.186229 0.909308 0.328716
+0.161869 0.197238 0.885862 0.387484
+0.148622 0.207402 0.858623 0.444593
+0.134739 0.216678 0.827707 0.499797
+0.120279 0.225027 0.793246 0.552862
+0.105304 0.232412 0.755389 0.603559
+0.0898784 0.238801 0.714297 0.651671
+0.0740676 0.244168 0.670146 0.696993
+0.0579397 0.24849 0.623126 0.739331
+0.0415636 0.251747 0.573437 0.778502
+0.0250096 0.253927 0.521293 0.81434
+0.00834842 0.255019 0.466916 0.846691
+-0.00834847 0.255019 0.41054 0.875416
+-0.0250096 0.253927 0.352406 0.900392
+-0.0415636 0.251747 0.292764 0.921513
+-0.0579397 0.24849 0.231867 0.938687
+-0.0740677 0.244168 0.169977 0.951842
+-0.0898785 0.238801 0.10736 0.960921
+-0.105304 0.232412 0.0442828 0.965886
+-0.120279 0.225027 -0.0189837 0.966714
+-0.134739 0.216678 -0.0821693 0.963402
+-0.148622 0.207402 -0.145003 0.955965
+-0.161869 0.197238 -0.207216 0.944435
+-0.174422 0.186229 -0.268541 0.92886
+-0.186229 0.174422 -0.328716 0.909308
+-0.197238 0.161869 -0.387484 0.885862
+-0.207402 0.148622 -0.444593 0.858623
+-0.216678 0.134739 -0.499797 0.827707
+-0.225027 0.120279 -0.552862 0.793246
+-0.232412 0.105304 -0.603559 0.755389
+-0.238801 0.0898784 -0.651672 0.714297
+-0.244168 0.0740676 -0.696993 0.670146
+-0.24849 0.0579397 -0.739331 0.623126
+-0.251747 0.0415636 -0.778502 0.573437
+-0.253927 0.0250096 -0.81434 0.521293
+-0.255019 0.00834847 -0.846691 0.466916
+0.153011 0.00500907 0.286863 -0.94566
+0.152356 0.0150057 0.348098 -0.924873
+0.151048 0.0249382 0.407842 -0.900126
+0.149094 0.0347638 0.46584 -0.871525
+0.146501 0.0444406 0.521843 -0.839192
+0.143281 0.0539271 0.575611 -0.803265
+0.139447 0.0631826 0.626915 -0.763898
+0.135016 0.0721676 0.675534 -0.72126
+0.130007 0.0808436 0.72126 -0.675534
+0.124441 0.0891733 0.763898 -0.626915
+0.118343 0.0971212 0.803265 -0.575611
+0.111737 0.104653 0.839192 -0.521843
+0.104653 0.111737 0.871525 -0.46584
+0.0971212 0.118343 0.900126 -0.407842
+0.0891733 0.124441 0.924873 -0.348098
+0.0808435 0.130007 0.94566 -0.286863
+0.0721676 0.135016 0.962397 -0.224399
+0.0631826 0.139447 0.975013 -0.160975
+0.053927 0.143281 0.983453 -0.0968616
+0.0444406 0.146501 0.987683 -0.0323333
+0.0347638 0.149094 0.987683 0.0323334
+0.0249382 0.151048 0.983453 0.0968618
+0.0150057 0.152356 0.975013 0.160975
+0.00500905 0.153011 0.962397 0.2244
+-0.00500908 0.153011 0.94566 0.286863
+-0.0150058 0.152356 0.924873 0.348098
+-0.0249382 0.151048 0.900126 0.407842
+-0.0347638 0.149094 0.871525 0.46584
+-0.0444406 0.146501 0.839192 0.521843
+-0.0539271 0.143281 0.803265 0.575611
+-0.0631826 0.139447 0.763898 0.626915
+-0.0721676 0.135016 0.72126 0.675534
+-0.0808435 0.130007 0.675534 0.72126
+-0.0891733 0.124441 0.626915 0.763898
+-0.0971212 0.118343 0.575611 0.803265
+-0.104653 0.111737 0.521843 0.839192
+-0.111737 0.104653 0.46584 0.871525
+-0.118343 0.0971212 0.407842 0.900126
+-0.124441 0.0891733 0.348098 0.924873
+-0.130007 0.0808435 0.286863 0.94566
+-0.135016 0.0721676 0.2244 0.962397
+-0.139447 0.0631826 0.160975 0.975013
+-0.143281 0.053927 0.0968616 0.983453
+-0.146501 0.0444406 0.0323334 0.987683
+-0.149094 0.0347638 -0.0323335 0.987683
+-0.151048 0.0249382 -0.0968616 0.983453
+-0.152356 0.0150057 -0.160975 0.975013
+-0.153011 0.00500908 -0.224399 0.962397
+0.204015 0.00667875 0.570214 -0.795732
+0.203141 0.0200077 0.621036 -0.756735
+0.201398 0.0332509 0.669199 -0.714497
+0.198792 0.0463518 0.714497 -0.669199
+0.195335 0.0592541 0.756735 -0.621036
+0.191041 0.0719027 0.795732 -0.570214
+0.185929 0.0842435 0.831322 -0.516949
+0.180021 0.0962235 0.863352 -0.461471
+0.173343 0.107791 0.891686 -0.404017
+0.165922 0.118898 0.9162 -0.344833
+0.15779 0.129495 0.936792 -0.284173
+0.148983 0.139538 0.953372 -0.222295
+0.139538 0.148983 0.96587 -0.159466
+0.129495 0.15779 0.974231 -0.0959534
+0.118898 0.165922 0.978421 -0.0320301
+0.107791 0.173343 0.978421 0.0320303
+0.0962234 0.180021 0.974231 0.0959535
+0.0842435 0.185929 0.96587 0.159466
+0.0719027 0.191041 0.953372 0.222295
+0.0592541 0.195335 0.936792 0.284173
+0.0463517 0.198792 0.9162 0.344833
+0.0332509 0.201398 0.891686 0.404018
+0.0200076 0.203141 0.863352 0.461472
+0.00667873 0.204015 0.831322 0.516949
+-0.00667877 0.204015 0.795732 0.570214
+-0.0200077 0.203141 0.756735 0.621036
+-0.0332509 0.201398 0.714497 0.669199
+-0.0463518 0.198792 0.669199 0.714497
+-0.0592541 0.195335 0.621036 0.756735
+-0.0719028 0.191041 0.570214 0.795732
+-0.0842435 0.185929 0.516949 0.831322
+-0.0962234 0.180021 0.461472 0.863352
+-0.107791 0.173343 0.404017 0.891686
+-0.118898 0.165922 0.344833 0.9162
+-0.129495 0.15779 0.284173 0.936792
+-0.139538 0.148983 0.222295 0.953372
+-0.148983 0.139538 0.159466 0.96587
+-0.15779 0.129495 0.0959533 0.974231
+-0.165922 0.118898 0.0320303 0.978421
+-0.173343 0.107791 -0.0320302 0.978421
+-0.180021 0.0962235 -0.0959533 0.974231
+-0.185929 0.0842435 -0.159466 0.96587
+-0.191041 0.0719027 -0.222295 0.953372
+-0.195335 0.0592541 -0.284173 0.936792
+-0.198792 0.0463517 -0.344833 0.9162
+-0.201398 0.0332509 -0.404017 0.891686
+-0.203141 0.0200077 -0.461472 0.863352
+-0.204015 0.00667877 -0.516949 0.831322
+0.204015 0.00667875 0.222295 -0.953372
+0.203141 0.0200077 0.284173 -0.936792
+0.201398 0.0332509 0.344833 -0.9162
+0.198792 0.0463518 0.404017 -0.891686
+0.195335 0.0592541 0.461471 -0.863352
+0.191041 0.0719027 0.516949 -0.831322
+0.185929 0.0842435 0.570214 -0.795732
+0.180021 0.0962235 0.621036 -0.756735
+0.173343 0.107791 0.669199 -0.714497
+0.165922 0.118898 0.714497 -0.669199
+0.15779 0.129495 0.756735 -0.621036
+0.148983 0.139538 0.795732 -0.570214
+0.139538 0.148983 0.831322 -0.516949
+0.129495 0.15779 0.863352 -0.461471
+0.118898 0.165922 0.891686 -0.404017
+0.107791 0.173343 0.9162 -0.344833
+0.0962234 0.180021 0.936792 -0.284173
+0.0842435 0.185929 0.953372 -0.222295
+0.0719027 0.191041 0.96587 -0.159466
+0.0592541 0.195335 0.974231 -0.0959533
+0.0463517 0.198792 0.978421 -0.0320302
+0.0332509 0.201398 0.978421 0.0320303
+0.0200076 0.203141 0.974231 0.0959535
+0.00667873 0.204015 0.96587 0.159466
+-0.00667877 0.204015 0.953372 0.222295
+-0.0200077 0.203141 0.936792 0.284173
+-0.0332509 0.201398 0.9162 0.344833
+-0.0463518 0.198792 0.891686 0.404018
+-0.0592541 0.195335 0.863352 0.461472
+-0.0719028 0.191041 0.831322 0.51695
+-0.0842435 0.185929 0.795732 0.570214
+-0.0962234 0.180021 0.756735 0.621036
+-0.107791 0.173343 0.714497 0.669199
+-0.118898 0.165922 0.669199 0.714497
+-0.129495 0.15779 0.621036 0.756735
+-0.139538 0.148983 0.570214 0.795732
+-0.148983 0.139538 0.516949 0.831322
+-0.15779 0.129495 0.461471 0.863352
+-0.165922 0.118898 0.404018 0.891686
+-0.173343 0.107791 0.344833 0.9162
+-0.180021 0.0962235 0.284173 0.936792
+-0.185929 0.0842435 0.222295 0.953372
+-0.191041 0.0719027 0.159466 0.96587
+-0.195335 0.0592541 0.0959534 0.974231
+-0.198792 0.0463517 0.0320301 0.978421
+-0.201398 0.0332509 -0.0320301 0.978421
+-0.203141 0.0200077 -0.0959534 0.974231
+-0.204015 0.00667877 -0.159466 0.96587
+0.255019 0.00834844 0.466916 -0.846691
+0.253927 0.0250096 0.521293 -0.81434
+0.251747 0.0415636 0.573437 -0.778502
+0.24849 0.0579397 0.623126 -0.739331
+0.244168 0.0740676 0.670146 -0.696993
+0.238801 0.0898784 0.714297 -0.651671
+0.232412 0.105304 0.755389 -0.603559
+0.225027 0.120279 0.793246 -0.552862
+0.216678 0.134739 0.827707 -0.499797
+0.207402 0.148622 0.858623 -0.444593
+0.197238 0.161869 0.885862 -0.387484
+0.186229 0.174422 0.909308 -0.328716
+0.174422 0.186229 0.92886 -0.268541
+0.161869 0.197238 0.944435 -0.207216
+0.148622 0.207402 0.955965 -0.145003
+0.134739 0.216678 0.963402 -0.0821692
+0.120279 0.225027 0.966714 -0.0189837
+0.105304 0.232412 0.965886 0.044283
+0.0898784 0.238801 0.960921 0.10736
+0.0740676 0.244168 0.951842 0.169977
+0.0579397 0.24849 0.938687 0.231867
+0.0415636 0.251747 0.921512 0.292764
+0.0250096 0.253927 0.900392 0.352407
+0.00834842 0.255019 0.875415 0.410541
+-0.00834847 0.255019 0.846691 0.466916
+-0.0250096 0.253927 0.81434 0.521293
+-0.0415636 0.251747 0.778502 0.573437
+-0.0579397 0.24849 0.739331 0.623126
+-0.0740677 0.244168 0.696993 0.670146
+-0.0898785 0.238801 0.651671 0.714297
+-0.105304 0.232412 0.603559 0.755389
+-0.120279 0.225027 0.552862 0.793246
+-0.134739 0.216678 0.499797 0.827707
+-0.148622 0.207402 0.444593 0.858623
+-0.161869 0.197238 0.387484 0.885862
+-0.174422 0.186229 0.328716 0.909308
+-0.186229 0.174422 0.268541 0.92886
+-0.197238 0.161869 0.207216 0.944435
+-0.207402 0.148622 0.145003 0.955965
+-0.216678 0.134739 0.0821693 0.963402
+-0.225027 0.120279 0.0189839 0.966714
+-0.232412 0.105304 -0.0442829 0.965886
+-0.238801 0.0898784 -0.10736 0.960921
+-0.244168 0.0740676 -0.169977 0.951842
+-0.24849 0.0579397 -0.231867 0.938687
+-0.251747 0.0415636 -0.292764 0.921513
+-0.253927 0.0250096 -0.352407 0.900392
+-0.255019 0.00834847 -0.41054 0.875416
+0.255019 0.00834844 0.705706 -0.660965
+0.253927 0.0250096 0.747424 -0.613395
+0.251747 0.0415636 0.785942 -0.563198
+0.24849 0.0579397 0.821094 -0.510589
+0.244168 0.0740676 0.85273 -0.455794
+0.238801 0.0898784 0.880714 -0.399046
+0.232412 0.105304 0.904928 -0.340591
+0.225027 0.120279 0.925266 -0.280676
+0.216678 0.134739 0.941642 -0.21956
+0.207402 0.148622 0.953986 -0.157504
+0.197238 0.161869 0.962244 -0.0947728
+0.186229 0.174422 0.966382 -0.0316361
+0.174422 0.186229 0.966382 0.0316361
+0.161869 0.197238 0.962244 0.0947728
+0.148622 0.207402 0.953986 0.157504
+0.134739 0.216678 0.941642 0.21956
+0.120279 0.225027 0.925266 0.280676
+0.105304 0.232412 0.904928 0.340591
+0.0898784 0.238801 0.880714 0.399047
+0.0740676 0.244168 0.85273 0.455794
+0.0579397 0.24849 0.821094 0.510589
+0.0415636 0.251747 0.785941 0.563198
+0.0250096 0.253927 0.747424 0.613395
+0.00834842 0.255019 0.705706 0.660966
+-0.00834847 0.255019 0.660965 0.705706
+-0.0250096 0.253927 0.613395 0.747424
+-0.0415636 0.251747 0.563198 0.785942
+-0.0579397 0.24849 0.510589 0.821094
+-0.0740677 0.244168 0.455793 0.85273
+-0.0898785 0.238801 0.399046 0.880714
+-0.105304 0.232412 0.34059 0.904928
+-0.120279 0.225027 0.280676 0.925266
+-0.134739 0.216678 0.21956 0.941642
+-0.148622 0.207402 0.157504 0.953986
+-0.161869 0.197238 0.0947728 0.962244
+-0.174422 0.186229 0.031636 0.966382
+-0.186229 0.174422 -0.031636 0.966382
+-0.197238 0.161869 -0.0947728 0.962244
+-0.207402 0.148622 -0.157504 0.953986
+-0.216678 0.134739 -0.21956 0.941642
+-0.225027 0.120279 -0.280676 0.925266
+-0.232412 0.105304 -0.340591 0.904928
+-0.238801 0.0898784 -0.399047 0.880714
+-0.244168 0.0740676 -0.455794 0.85273
+-0.24849 0.0579397 -0.510589 0.821094
+-0.251747 0.0415636 -0.563198 0.785942
+-0.253927 0.0250096 -0.613395 0.747424
+-0.255019 0.00834847 -0.660965 0.705706
+0.306022 0.0100181 0.773807 -0.554502
+0.304712 0.0300115 0.808416 -0.502706
+0.302097 0.0498763 0.839564 -0.448756
+0.298188 0.0695276 0.867117 -0.392885
+0.293002 0.0888812 0.890956 -0.335332
+0.286561 0.107854 0.91098 -0.276343
+0.278894 0.126365 0.927103 -0.21617
+0.270032 0.144335 0.939256 -0.155072
+0.260014 0.161687 0.947388 -0.0933095
+0.248882 0.178347 0.951462 -0.0311476
+0.236685 0.194242 0.951462 0.0311476
+0.223474 0.209307 0.947388 0.0933096
+0.209307 0.223474 0.939256 0.155072
+0.194242 0.236685 0.927103 0.21617
+0.178347 0.248882 0.91098 0.276343
+0.161687 0.260014 0.890956 0.335332
+0.144335 0.270032 0.867116 0.392885
+0.126365 0.278894 0.839564 0.448756
+0.107854 0.286562 0.808416 0.502706
+0.0888812 0.293002 0.773807 0.554502
+0.0695276 0.298188 0.735884 0.603924
+0.0498763 0.302097 0.69481 0.650761
+0.0300115 0.304712 0.65076 0.69481
+0.0100181 0.306022 0.603924 0.735884
+-0.0100182 0.306022 0.554502 0.773807
+-0.0300115 0.304712 0.502706 0.808416
+-0.0498764 0.302097 0.448756 0.839564
+-0.0695276 0.298188 0.392885 0.867117
+-0.0888812 0.293002 0.335332 0.890956
+-0.107854 0.286561 0.276343 0.91098
+-0.126365 0.278894 0.21617 0.927103
+-0.144335 0.270032 0.155072 0.939256
+-0.161687 0.260014 0.0933095 0.947388
+-0.178347 0.248882 0.0311475 0.951462
+-0.194242 0.236685 -0.0311476 0.951462
+-0.209307 0.223474 -0.0933096 0.947388
+-0.223474 0.209307 -0.155072 0.939256
+-0.236685 0.194242 -0.21617 0.927103
+-0.248882 0.178347 -0.276343 0.91098
+-0.260014 0.161687 -0.335332 0.890956
+-0.270032 0.144335 -0.392885 0.867117
+-0.278894 0.126365 -0.448756 0.839564
+-0.286562 0.107854 -0.502706 0.808416
+-0.293002 0.0888812 -0.554502 0.773807
+-0.298188 0.0695276 -0.603924 0.735884
+-0.302097 0.0498764 -0.65076 0.69481
+-0.304712 0.0300115 -0.69481 0.65076
+-0.306022 0.0100182 -0.735884 0.603924
+0.306022 0.0100181 0.603924 -0.735884
+0.304712 0.0300115 0.65076 -0.69481
+0.302097 0.0498763 0.69481 -0.65076
+0.298188 0.0695276 0.735884 -0.603924
+0.293002 0.0888812 0.773807 -0.554502
+0.286561 0.107854 0.808416 -0.502706
+0.278894 0.126365 0.839564 -0.448756
+0.270032 0.144335 0.867117 -0.392885
+0.260014 0.161687 0.890956 -0.335332
+0.248882 0.178347 0.91098 -0.276343
+0.236685 0.194242 0.927103 -0.21617
+0.223474 0.209307 0.939256 -0.155072
+0.209307 0.223474 0.947388 -0.0933095
+0.194242 0.236685 0.951462 -0.0311476
+0.178347 0.248882 0.951462 0.0311477
+0.161687 0.260014 0.947388 0.0933096
+0.144335 0.270032 0.939256 0.155072
+0.126365 0.278894 0.927103 0.21617
+0.107854 0.286562 0.91098 0.276343
+0.0888812 0.293002 0.890956 0.335332
+0.0695276 0.298188 0.867117 0.392885
+0.0498763 0.302097 0.839564 0.448756
+0.0300115 0.304712 0.808416 0.502706
+0.0100181 0.306022 0.773807 0.554502
+-0.0100182 0.306022 0.735884 0.603924
+-0.0300115 0.304712 0.69481 0.650761
+-0.0498764 0.302097 0.65076 0.69481
+-0.0695276 0.298188 0.603924 0.735884
+-0.0888812 0.293002 0.554502 0.773807
+-0.107854 0.286561 0.502705 0.808416
+-0.126365 0.278894 0.448756 0.839564
+-0.144335 0.270032 0.392885 0.867116
+-0.161687 0.260014 0.335332 0.890956
+-0.178347 0.248882 0.276343 0.91098
+-0.194242 0.236685 0.21617 0.927103
+-0.209307 0.223474 0.155072 0.939256
+-0.223474 0.209307 0.0933096 0.947388
+-0.236685 0.194242 0.0311476 0.951462
+-0.248882 0.178347 -0.0311476 0.951462
+-0.260014 0.161687 -0.0933096 0.947388
+-0.270032 0.144335 -0.155072 0.939256
+-0.278894 0.126365 -0.21617 0.927103
+-0.286562 0.107854 -0.276343 0.91098
+-0.293002 0.0888812 -0.335332 0.890956
+-0.298188 0.0695276 -0.392885 0.867116
+-0.302097 0.0498764 -0.448756 0.839564
+-0.304712 0.0300115 -0.502706 0.808416
+-0.306022 0.0100182 -0.554502 0.773807
+0.357026 0.0116878 0.681709 -0.63849
+0.355497 0.0350134 0.722008 -0.592537
+0.352446 0.0581891 0.759216 -0.544047
+0.347886 0.0811156 0.793173 -0.493227
+0.341836 0.103695 0.823733 -0.440295
+0.334322 0.12583 0.850766 -0.385477
+0.325376 0.147426 0.874156 -0.329009
+0.315037 0.168391 0.893803 -0.271132
+0.30335 0.188635 0.909622 -0.212094
+0.290363 0.208071 0.921546 -0.152148
+0.276133 0.226616 0.929524 -0.0915501
+0.26072 0.244191 0.933521 -0.0305603
+0.244191 0.26072 0.933521 0.0305603
+0.226616 0.276133 0.929524 0.0915501
+0.208071 0.290363 0.921546 0.152148
+0.188635 0.30335 0.909622 0.212094
+0.168391 0.315038 0.893803 0.271132
+0.147426 0.325376 0.874156 0.329009
+0.12583 0.334322 0.850766 0.385477
+0.103695 0.341836 0.823733 0.440295
+0.0811155 0.347886 0.793173 0.493227
+0.058189 0.352446 0.759216 0.544047
+0.0350134 0.355497 0.722008 0.592537
+0.0116878 0.357026 0.681709 0.63849
+-0.0116879 0.357026 0.63849 0.681709
+-0.0350134 0.355497 0.592537 0.722008
+-0.0581891 0.352446 0.544047 0.759216
+-0.0811156 0.347886 0.493227 0.793173
+-0.103695 0.341836 0.440294 0.823733
+-0.12583 0.334322 0.385477 0.850766
+-0.147426 0.325376 0.329009 0.874156
+-0.168391 0.315038 0.271132 0.893803
+-0.188635 0.30335 0.212094 0.909622
+-0.208071 0.290363 0.152148 0.921546
+-0.226616 0.276133 0.0915501 0.929524
+-0.244191 0.26072 0.0305603 0.933521
+-0.26072 0.244191 -0.0305603 0.933521
+-0.276133 0.226616 -0.0915501 0.929524
+-0.290363 0.208071 -0.152148 0.921546
+-0.30335 0.188635 -0.212094 0.909622
+-0.315037 0.168391 -0.271132 0.893803
+-0.325376 0.147426 -0.329009 0.874156
+-0.334322 0.12583 -0.385477 0.850766
+-0.341836 0.103695 -0.440295 0.823733
+-0.347886 0.0811155 -0.493227 0.793173
+-0.352446 0.0581891 -0.544047 0.759216
+-0.355497 0.0350134 -0.592537 0.722008
+-0.357026 0.0116879 -0.63849 0.681709
+0.255019 0.00834844 0.959434 -0.119929
+0.253927 0.0250096 0.965223 -0.0569222
+0.251747 0.0415636 0.966879 0.0063283
+0.24849 0.0579397 0.964395 0.0695517
+0.244168 0.0740676 0.957782 0.132477
+0.238801 0.0898784 0.947067 0.194836
+0.232412 0.105304 0.932296 0.256359
+0.225027 0.120279 0.913533 0.316786
+0.216678 0.134739 0.890858 0.375855
+0.207402 0.148622 0.864369 0.433316
+0.197238 0.161869 0.834178 0.48892
+0.186229 0.174422 0.800415 0.542431
+0.174422 0.186229 0.763225 0.593619
+0.161869 0.197238 0.722766 0.642266
+0.148622 0.207402 0.679212 0.688162
+0.134739 0.216678 0.63275 0.731111
+0.120279 0.225027 0.583578 0.770929
+0.105304 0.232412 0.531908 0.807447
+0.0898784 0.238801 0.477959 0.840506
+0.0740676 0.244168 0.421964 0.869967
+0.0579397 0.24849 0.364162 0.895702
+0.0415636 0.251747 0.304801 0.917601
+0.0250096 0.253927 0.244134 0.935572
+0.00834842 0.255019 0.182422 0.949536
+-0.00834847 0.255019 0.119929 0.959434
+-0.0250096 0.253927 0.0569221 0.965223
+-0.0415636 0.251747 -0.00632837 0.966879
+-0.0579397 0.24849 -0.0695517 0.964395
+-0.0740677 0.244168 -0.132477 0.957782
+-0.0898785 0.238801 -0.194836 0.947066
+-0.105304 0.232412 -0.25636 0.932296
+-0.120279 0.225027 -0.316786 0.913533
+-0.134739 0.216678 -0.375855 0.890858
+-0.148622 0.207402 -0.433316 0.864369
+-0.161869 0.197238 -0.48892 0.834178
+-0.174422 0.186229 -0.542431 0.800415
+-0.186229 0.174422 -0.593619 0.763225
+-0.197238 0.161869 -0.642266 0.722766
+-0.207402 0.148622 -0.688162 0.679212
+-0.216678 0.134739 -0.731111 0.63275
+-0.225027 0.120279 -0.770929 0.583578
+-0.232412 0.105304 -0.807447 0.531908
+-0.238801 0.0898784 -0.840506 0.477959
+-0.244168 0.0740676 -0.869967 0.421964
+-0.24849 0.0579397 -0.895702 0.364162
+-0.251747 0.0415636 -0.917601 0.304801
+-0.253927 0.0250096 -0.935572 0.244134
+-0.255019 0.00834847 -0.949536 0.182422
+0.306022 0.0100181 0.947388 -0.0933095
+0.304712 0.0300115 0.951462 -0.0311476
+0.302097 0.0498763 0.951462 0.0311476
+0.298188 0.0695276 0.947388 0.0933096
+0.293002 0.0888812 0.939256 0.155072
+0.286561 0.107854 0.927103 0.21617
+0.278894 0.126365 0.91098 0.276343
+0.270032 0.144335 0.890956 0.335332
+0.260014 0.161687 0.867117 0.392885
+0.248882 0.178347 0.839564 0.448756
+0.236685 0.194242 0.808416 0.502706
+0.223474 0.209307 0.773807 0.554502
+0.209307 0.223474 0.735884 0.603924
+0.194242 0.236685 0.69481 0.65076
+0.178347 0.248882 0.65076 0.69481
+0.161687 0.260014 0.603924 0.735884
+0.144335 0.270032 0.554502 0.773807
+0.126365 0.278894 0.502706 0.808416
+0.107854 0.286562 0.448756 0.839564
+0.0888812 0.293002 0.392885 0.867117
+0.0695276 0.298188 0.335332 0.890956
+0.0498763 0.302097 0.276343 0.91098
+0.0300115 0.304712 0.21617 0.927103
+0.0100181 0.306022 0.155072 0.939256
+-0.0100182 0.306022 0.0933094 0.947388
+-0.0300115 0.304712 0.0311476 0.951462
+-0.0498764 0.302097 -0.0311477 0.951462
+-0.0695276 0.298188 -0.0933096 0.947388
+-0.0888812 0.293002 -0.155072 0.939256
+-0.107854 0.286561 -0.21617 0.927103
+-0.126365 0.278894 -0.276343 0.91098
+-0.144335 0.270032 -0.335332 0.890956
+-0.161687 0.260014 -0.392885 0.867117
+-0.178347 0.248882 -0.448756 0.839564
+-0.194242 0.236685 -0.502706 0.808416
+-0.209307 0.223474 -0.554502 0.773807
+-0.223474 0.209307 -0.603924 0.735884
+-0.236685 0.194242 -0.650761 0.69481
+-0.248882 0.178347 -0.69481 0.650761
+-0.260014 0.161687 -0.735884 0.603924
+-0.270032 0.144335 -0.773807 0.554502
+-0.278894 0.126365 -0.808416 0.502706
+-0.286562 0.107854 -0.839564 0.448756
+-0.293002 0.0888812 -0.867117 0.392885
+-0.298188 0.0695276 -0.890956 0.335332
+-0.302097 0.0498764 -0.91098 0.276343
+-0.304712 0.0300115 -0.927103 0.21617
+-0.306022 0.0100182 -0.939256 0.155072
+0.306022 0.0100181 0.890956 -0.335332
+0.304712 0.0300115 0.91098 -0.276343
+0.302097 0.0498763 0.927103 -0.21617
+0.298188 0.0695276 0.939256 -0.155072
+0.293002 0.0888812 0.947388 -0.0933095
+0.286561 0.107854 0.951462 -0.0311477
+0.278894 0.126365 0.951462 0.0311476
+0.270032 0.144335 0.947388 0.0933096
+0.260014 0.161687 0.939256 0.155072
+0.248882 0.178347 0.927103 0.21617
+0.236685 0.194242 0.91098 0.276343
+0.223474 0.209307 0.890956 0.335332
+0.209307 0.223474 0.867117 0.392885
+0.194242 0.236685 0.839564 0.448756
+0.178347 0.248882 0.808416 0.502706
+0.161687 0.260014 0.773807 0.554502
+0.144335 0.270032 0.735884 0.603924
+0.126365 0.278894 0.69481 0.650761
+0.107854 0.286562 0.65076 0.69481
+0.0888812 0.293002 0.603924 0.735884
+0.0695276 0.298188 0.554502 0.773807
+0.0498763 0.302097 0.502706 0.808416
+0.0300115 0.304712 0.448756 0.839564
+0.0100181 0.306022 0.392885 0.867117
+-0.0100182 0.306022 0.335332 0.890956
+-0.0300115 0.304712 0.276343 0.91098
+-0.0498764 0.302097 0.21617 0.927103
+-0.0695276 0.298188 0.155072 0.939256
+-0.0888812 0.293002 0.0933094 0.947388
+-0.107854 0.286561 0.0311475 0.951462
+-0.126365 0.278894 -0.0311478 0.951462
+-0.144335 0.270032 -0.0933094 0.947388
+-0.161687 0.260014 -0.155072 0.939256
+-0.178347 0.248882 -0.21617 0.927103
+-0.194242 0.236685 -0.276343 0.91098
+-0.209307 0.223474 -0.335332 0.890956
+-0.223474 0.209307 -0.392885 0.867117
+-0.236685 0.194242 -0.448756 0.839564
+-0.248882 0.178347 -0.502706 0.808416
+-0.260014 0.161687 -0.554502 0.773807
+-0.270032 0.144335 -0.603924 0.735884
+-0.278894 0.126365 -0.65076 0.69481
+-0.286562 0.107854 -0.69481 0.65076
+-0.293002 0.0888812 -0.735884 0.603924
+-0.298188 0.0695276 -0.773807 0.554502
+-0.302097 0.0498764 -0.808416 0.502706
+-0.304712 0.0300115 -0.839564 0.448756
+-0.306022 0.0100182 -0.867116 0.392885
+0.357026 0.0116878 0.891229 -0.279477
+0.355497 0.0350134 0.907599 -0.22059
+0.352446 0.0581891 0.920083 -0.160758
+0.347886 0.0811156 0.928627 -0.100237
+0.341836 0.103695 0.933195 -0.0392873
+0.334322 0.12583 0.933766 0.0218307
+0.325376 0.147426 0.930339 0.0828552
+0.315037 0.168391 0.922928 0.143525
+0.30335 0.188635 0.911565 0.20358
+0.290363 0.208071 0.896299 0.262763
+0.276133 0.226616 0.877194 0.320821
+0.26072 0.244191 0.854333 0.377506
+0.244191 0.26072 0.827814 0.432574
+0.226616 0.276133 0.79775 0.485789
+0.208071 0.290363 0.76427 0.536924
+0.188635 0.30335 0.727517 0.58576
+0.168391 0.315038 0.687649 0.632088
+0.147426 0.325376 0.644836 0.675709
+0.12583 0.334322 0.599262 0.716437
+0.103695 0.341836 0.551121 0.754096
+0.0811155 0.347886 0.500621 0.788527
+0.058189 0.352446 0.447977 0.819581
+0.0350134 0.355497 0.393415 0.847125
+0.0116878 0.357026 0.337168 0.871042
+-0.0116879 0.357026 0.279477 0.891229
+-0.0350134 0.355497 0.22059 0.907599
+-0.0581891 0.352446 0.160757 0.920083
+-0.0811156 0.347886 0.100237 0.928627
+-0.103695 0.341836 0.0392871 0.933195
+-0.12583 0.334322 -0.0218308 0.933766
+-0.147426 0.325376 -0.0828553 0.930339
+-0.168391 0.315038 -0.143525 0.922928
+-0.188635 0.30335 -0.20358 0.911565
+-0.208071 0.290363 -0.262763 0.896299
+-0.226616 0.276133 -0.320821 0.877194
+-0.244191 0.26072 -0.377506 0.854333
+-0.26072 0.244191 -0.432574 0.827814
+-0.276133 0.226616 -0.485789 0.79775
+-0.290363 0.208071 -0.536924 0.76427
+-0.30335 0.188635 -0.58576 0.727517
+-0.315037 0.168391 -0.632088 0.687649
+-0.325376 0.147426 -0.675709 0.644836
+-0.334322 0.12583 -0.716437 0.599262
+-0.341836 0.103695 -0.754096 0.551121
+-0.347886 0.0811155 -0.788527 0.500621
+-0.352446 0.0581891 -0.819581 0.447977
+-0.355497 0.0350134 -0.847125 0.393415
+-0.357026 0.0116879 -0.871042 0.337168
+0.357026 0.0116878 0.931073 -0.0741531
+0.355497 0.0350134 0.933929 -0.0130992
+0.352446 0.0581891 0.932787 0.0480108
+0.347886 0.0811156 0.927649 0.108915
+0.341836 0.103695 0.91854 0.169353
+0.334322 0.12583 0.905497 0.229066
+0.325376 0.147426 0.888577 0.287798
+0.315037 0.168391 0.867851 0.345297
+0.30335 0.188635 0.843409 0.401318
+0.290363 0.208071 0.815356 0.45562
+0.276133 0.226616 0.783811 0.507972
+0.26072 0.244191 0.74891 0.558148
+0.244191 0.26072 0.710802 0.605934
+0.226616 0.276133 0.66965 0.651125
+0.208071 0.290363 0.625631 0.693528
+0.188635 0.30335 0.578932 0.732962
+0.168391 0.315038 0.529755 0.769256
+0.147426 0.325376 0.478309 0.802257
+0.12583 0.334322 0.424815 0.831822
+0.103695 0.341836 0.369501 0.857825
+0.0811155 0.347886 0.312606 0.880155
+0.058189 0.352446 0.254371 0.898716
+0.0350134 0.355497 0.195048 0.913429
+0.0116878 0.357026 0.134889 0.92423
+-0.0116879 0.357026 0.074153 0.931073
+-0.0350134 0.355497 0.0130991 0.933929
+-0.0581891 0.352446 -0.0480108 0.932787
+-0.0811156 0.347886 -0.108915 0.927649
+-0.103695 0.341836 -0.169353 0.91854
+-0.12583 0.334322 -0.229066 0.905497
+-0.147426 0.325376 -0.287798 0.888577
+-0.168391 0.315038 -0.345297 0.867851
+-0.188635 0.30335 -0.401318 0.84341
+-0.208071 0.290363 -0.455621 0.815356
+-0.226616 0.276133 -0.507972 0.783812
+-0.244191 0.26072 -0.558148 0.74891
+-0.26072 0.244191 -0.605934 0.710802
+-0.276133 0.226616 -0.651125 0.66965
+-0.290363 0.208071 -0.693528 0.625631
+-0.30335 0.188635 -0.732962 0.578933
+-0.315037 0.168391 -0.769256 0.529755
+-0.325376 0.147426 -0.802257 0.478309
+-0.334322 0.12583 -0.831822 0.424815
+-0.341836 0.103695 -0.857825 0.369501
+-0.347886 0.0811155 -0.880155 0.312606
+-0.352446 0.0581891 -0.898716 0.254372
+-0.355497 0.0350134 -0.913429 0.195048
+-0.357026 0.0116879 -0.92423 0.134889
+0.40803 0.0133575 0.910916 -0.0597046
+0.406282 0.0400153 0.912871 2.49393e-09
+0.402795 0.0665018 0.910916 0.0597046
+0.397584 0.0927035 0.905061 0.119154
+0.390669 0.118508 0.89533 0.178092
+0.382082 0.143805 0.881766 0.236268
+0.371859 0.168487 0.864425 0.293433
+0.360043 0.192447 0.843383 0.349341
+0.346685 0.215583 0.818729 0.403752
+0.331843 0.237796 0.790569 0.456435
+0.31558 0.25899 0.759024 0.507164
+0.297966 0.279075 0.724229 0.555721
+0.279075 0.297966 0.686333 0.601898
+0.25899 0.31558 0.645497 0.645497
+0.237796 0.331843 0.601898 0.686333
+0.215583 0.346685 0.555721 0.724229
+0.192447 0.360043 0.507164 0.759024
+0.168487 0.371859 0.456435 0.790569
+0.143805 0.382082 0.403752 0.818729
+0.118508 0.390669 0.349341 0.843383
+0.0927035 0.397584 0.293433 0.864425
+0.0665017 0.402795 0.236268 0.881766
+0.0400153 0.406282 0.178092 0.89533
+0.0133575 0.40803 0.119153 0.905061
+-0.0133575 0.40803 0.0597045 0.910916
+-0.0400154 0.406282 -7.64039e-08 0.912871
+-0.0665018 0.402795 -0.0597047 0.910916
+-0.0927035 0.397584 -0.119154 0.905061
+-0.118508 0.390669 -0.178092 0.89533
+-0.143806 0.382082 -0.236269 0.881766
+-0.168487 0.371859 -0.293433 0.864425
+-0.192447 0.360043 -0.34934 0.843383
+-0.215583 0.346685 -0.403752 0.818729
+-0.237796 0.331843 -0.456436 0.790569
+-0.25899 0.31558 -0.507164 0.759024
+-0.279075 0.297966 -0.555721 0.724229
+-0.297966 0.279075 -0.601898 0.686333
+-0.31558 0.25899 -0.645497 0.645497
+-0.331843 0.237796 -0.686333 0.601898
+-0.346685 0.215583 -0.724229 0.555721
+-0.360043 0.192447 -0.759024 0.507164
+-0.371859 0.168487 -0.790569 0.456435
+-0.382082 0.143805 -0.818729 0.403752
+-0.390669 0.118508 -0.843383 0.349341
+-0.397584 0.0927035 -0.864425 0.293433
+-0.402795 0.0665018 -0.881766 0.236268
+-0.406282 0.0400153 -0.89533 0.178092
+-0.40803 0.0133575 -0.905061 0.119154
+0.40803 0.0133575 0.881766 -0.236268
+0.406282 0.0400153 0.89533 -0.178092
+0.402795 0.0665018 0.905061 -0.119154
+0.397584 0.0927035 0.910916 -0.0597046
+0.390669 0.118508 0.912871 6.80367e-10
+0.382082 0.143805 0.910916 0.0597046
+0.371859 0.168487 0.905061 0.119154
+0.360043 0.192447 0.89533 0.178092
+0.346685 0.215583 0.881766 0.236268
+0.331843 0.237796 0.864425 0.293433
+0.31558 0.25899 0.843383 0.349341
+0.297966 0.279075 0.818729 0.403753
+0.279075 0.297966 0.790569 0.456435
+0.25899 0.31558 0.759024 0.507164
+0.237796 0.331843 0.724229 0.555721
+0.215583 0.346685 0.686333 0.601898
+0.192447 0.360043 0.645497 0.645497
+0.168487 0.371859 0.601898 0.686333
+0.143805 0.382082 0.555721 0.724229
+0.118508 0.390669 0.507164 0.759024
+0.0927035 0.397584 0.456435 0.790569
+0.0665017 0.402795 0.403752 0.818729
+0.0400153 0.406282 0.34934 0.843383
+0.0133575 0.40803 0.293433 0.864425
+-0.0133575 0.40803 0.236268 0.881766
+-0.0400154 0.406282 0.178092 0.89533
+-0.0665018 0.402795 0.119154 0.905061
+-0.0927035 0.397584 0.0597046 0.910916
+-0.118508 0.390669 -1.49406e-07 0.912871
+-0.143806 0.382082 -0.0597048 0.910916
+-0.168487 0.371859 -0.119154 0.905061
+-0.192447 0.360043 -0.178092 0.89533
+-0.215583 0.346685 -0.236268 0.881766
+-0.237796 0.331843 -0.293433 0.864425
+-0.25899 0.31558 -0.349341 0.843383
+-0.279075 0.297966 -0.403753 0.818729
+-0.297966 0.279075 -0.456435 0.790569
+-0.31558 0.25899 -0.507164 0.759024
+-0.331843 0.237796 -0.555721 0.724229
+-0.346685 0.215583 -0.601898 0.686333
+-0.360043 0.192447 -0.645497 0.645497
+-0.371859 0.168487 -0.686333 0.601898
+-0.382082 0.143805 -0.724229 0.555721
+-0.390669 0.118508 -0.759024 0.507164
+-0.397584 0.0927035 -0.790569 0.456435
+-0.402795 0.0665018 -0.818729 0.403753
+-0.406282 0.0400153 -0.843383 0.349341
+-0.40803 0.0133575 -0.864425 0.293433
+0.456191 0.0149342 0.877872 -0.144937
+0.454238 0.0447385 0.885472 -0.0872114
+0.450339 0.0743513 0.88928 -0.029112
+0.444512 0.103646 0.88928 0.029112
+0.436782 0.132496 0.885472 0.0872114
+0.427181 0.160779 0.877872 0.144937
+0.415751 0.188374 0.866513 0.202043
+0.40254 0.215162 0.851444 0.258283
+0.387606 0.241029 0.832728 0.313417
+0.371012 0.265863 0.810447 0.367209
+0.352829 0.28956 0.784695 0.419428
+0.333136 0.312016 0.755583 0.469852
+0.312016 0.333136 0.723236 0.518263
+0.28956 0.352829 0.687791 0.564456
+0.265863 0.371012 0.649401 0.608231
+0.241029 0.387606 0.608231 0.649401
+0.215162 0.40254 0.564456 0.687791
+0.188374 0.415751 0.518263 0.723236
+0.160779 0.427181 0.469852 0.755583
+0.132496 0.436782 0.419428 0.784695
+0.103646 0.444512 0.367209 0.810447
+0.0743512 0.450339 0.313417 0.832728
+0.0447384 0.454238 0.258283 0.851444
+0.0149341 0.456191 0.202042 0.866513
+-0.0149342 0.456191 0.144937 0.877872
+-0.0447385 0.454238 0.0872113 0.885472
+-0.0743513 0.450339 0.029112 0.88928
+-0.103646 0.444512 -0.0291121 0.88928
+-0.132496 0.436781 -0.0872115 0.885472
+-0.160779 0.427181 -0.144937 0.877872
+-0.188374 0.415751 -0.202043 0.866513
+-0.215162 0.40254 -0.258283 0.851444
+-0.241029 0.387606 -0.313417 0.832728
+-0.265864 0.371012 -0.367209 0.810447
+-0.28956 0.352829 -0.419428 0.784695
+-0.312016 0.333136 -0.469852 0.755583
+-0.333136 0.312016 -0.518263 0.723236
+-0.352829 0.28956 -0.564456 0.687791
+-0.371012 0.265864 -0.608231 0.649401
+-0.387606 0.241029 -0.649401 0.608231
+-0.40254 0.215162 -0.687791 0.564456
+-0.415751 0.188374 -0.723236 0.518263
+-0.427181 0.160779 -0.755583 0.469852
+-0.436782 0.132496 -0.784695 0.419428
+-0.444512 0.103646 -0.810447 0.367209
+-0.450339 0.0743513 -0.832728 0.313417
+-0.454238 0.0447385 -0.851444 0.258283
+-0.456191 0.0149342 -0.866513 0.202043
+0.357026 0.0116878 0.806694 -0.470787
+0.355497 0.0350134 0.835758 -0.417019
+0.352446 0.0581891 0.861243 -0.361465
+0.347886 0.0811156 0.88304 -0.304363
+0.341836 0.103695 0.901055 -0.245958
+0.334322 0.12583 0.915212 -0.186499
+0.325376 0.147426 0.925451 -0.126242
+0.315037 0.168391 0.931726 -0.0654444
+0.30335 0.188635 0.934011 -0.00436652
+0.290363 0.208071 0.932297 0.0567301
+0.276133 0.226616 0.92659 0.117584
+0.26072 0.244191 0.916916 0.177934
+0.244191 0.26072 0.903316 0.237522
+0.226616 0.276133 0.885847 0.296093
+0.208071 0.290363 0.864585 0.353396
+0.188635 0.30335 0.83962 0.409186
+0.168391 0.315038 0.811061 0.463224
+0.147426 0.325376 0.779028 0.515278
+0.12583 0.334322 0.743659 0.565126
+0.103695 0.341836 0.705106 0.612553
+0.0811155 0.347886 0.663533 0.657358
+0.058189 0.352446 0.619119 0.699348
+0.0350134 0.355497 0.572054 0.738343
+0.0116878 0.357026 0.522539 0.774176
+-0.0116879 0.357026 0.470787 0.806694
+-0.0350134 0.355497 0.417019 0.835758
+-0.0581891 0.352446 0.361465 0.861243
+-0.0811156 0.347886 0.304363 0.88304
+-0.103695 0.341836 0.245957 0.901055
+-0.12583 0.334322 0.186499 0.915213
+-0.147426 0.325376 0.126242 0.925451
+-0.168391 0.315038 0.0654445 0.931726
+-0.188635 0.30335 0.00436653 0.934011
+-0.208071 0.290363 -0.0567302 0.932297
+-0.226616 0.276133 -0.117584 0.92659
+-0.244191 0.26072 -0.177934 0.916916
+-0.26072 0.244191 -0.237522 0.903316
+-0.276133 0.226616 -0.296093 0.885847
+-0.290363 0.208071 -0.353396 0.864585
+-0.30335 0.188635 -0.409186 0.83962
+-0.315037 0.168391 -0.463224 0.811061
+-0.325376 0.147426 -0.515278 0.779028
+-0.334322 0.12583 -0.565126 0.743659
+-0.341836 0.103695 -0.612553 0.705106
+-0.347886 0.0811155 -0.657358 0.663533
+-0.352446 0.0581891 -0.699348 0.619119
+-0.355497 0.0350134 -0.738343 0.572054
+-0.357026 0.0116879 -0.774176 0.522539
+0.40803 0.0133575 0.818729 -0.403752
+0.406282 0.0400153 0.843383 -0.349341
+0.402795 0.0665018 0.864425 -0.293433
+0.397584 0.0927035 0.881766 -0.236268
+0.390669 0.118508 0.89533 -0.178092
+0.382082 0.143805 0.905061 -0.119154
+0.371859 0.168487 0.910916 -0.0597046
+0.360043 0.192447 0.912871 1.92711e-08
+0.346685 0.215583 0.910916 0.0597046
+0.331843 0.237796 0.905061 0.119154
+0.31558 0.25899 0.89533 0.178092
+0.297966 0.279075 0.881766 0.236268
+0.279075 0.297966 0.864425 0.293433
+0.25899 0.31558 0.843383 0.349341
+0.237796 0.331843 0.818729 0.403753
+0.215583 0.346685 0.790569 0.456436
+0.192447 0.360043 0.759024 0.507164
+0.168487 0.371859 0.724229 0.555721
+0.143805 0.382082 0.686333 0.601898
+0.118508 0.390669 0.645497 0.645497
+0.0927035 0.397584 0.601898 0.686333
+0.0665017 0.402795 0.555721 0.724229
+0.0400153 0.406282 0.507164 0.759024
+0.0133575 0.40803 0.456435 0.790569
+-0.0133575 0.40803 0.403752 0.818729
+-0.0400154 0.406282 0.349341 0.843383
+-0.0665018 0.402795 0.293433 0.864425
+-0.0927035 0.397584 0.236268 0.881766
+-0.118508 0.390669 0.178092 0.89533
+-0.143806 0.382082 0.119153 0.905061
+-0.168487 0.371859 0.0597045 0.910916
+-0.192447 0.360043 1.0406e-07 0.912871
+-0.215583 0.346685 -0.0597046 0.910916
+-0.237796 0.331843 -0.119154 0.905061
+-0.25899 0.31558 -0.178092 0.89533
+-0.279075 0.297966 -0.236268 0.881766
+-0.297966 0.279075 -0.293433 0.864425
+-0.31558 0.25899 -0.349341 0.843383
+-0.331843 0.237796 -0.403752 0.818729
+-0.346685 0.215583 -0.456435 0.790569
+-0.360043 0.192447 -0.507164 0.759024
+-0.371859 0.168487 -0.555721 0.724229
+-0.382082 0.143805 -0.601898 0.686333
+-0.390669 0.118508 -0.645497 0.645497
+-0.397584 0.0927035 -0.686333 0.601898
+-0.402795 0.0665018 -0.724229 0.555721
+-0.406282 0.0400153 -0.759024 0.507164
+-0.40803 0.0133575 -0.790569 0.456436
+0.40803 0.0133575 0.724229 -0.555721
+0.406282 0.0400153 0.759024 -0.507164
+0.402795 0.0665018 0.790569 -0.456435
+0.397584 0.0927035 0.818729 -0.403752
+0.390669 0.118508 0.843383 -0.349341
+0.382082 0.143805 0.864425 -0.293433
+0.371859 0.168487 0.881766 -0.236268
+0.360043 0.192447 0.89533 -0.178092
+0.346685 0.215583 0.905061 -0.119154
+0.331843 0.237796 0.910916 -0.0597046
+0.31558 0.25899 0.912871 -1.65496e-08
+0.297966 0.279075 0.910916 0.0597046
+0.279075 0.297966 0.905061 0.119154
+0.25899 0.31558 0.89533 0.178092
+0.237796 0.331843 0.881766 0.236268
+0.215583 0.346685 0.864425 0.293433
+0.192447 0.360043 0.843383 0.349341
+0.168487 0.371859 0.818729 0.403753
+0.143805 0.382082 0.790569 0.456436
+0.118508 0.390669 0.759024 0.507164
+0.0927035 0.397584 0.724229 0.555721
+0.0665017 0.402795 0.686333 0.601898
+0.0400153 0.406282 0.645497 0.645497
+0.0133575 0.40803 0.601898 0.686333
+-0.0133575 0.40803 0.555721 0.724229
+-0.0400154 0.406282 0.507164 0.759024
+-0.0665018 0.402795 0.456435 0.790569
+-0.0927035 0.397584 0.403752 0.818729
+-0.118508 0.390669 0.34934 0.843383
+-0.143806 0.382082 0.293433 0.864425
+-0.168487 0.371859 0.236268 0.881766
+-0.192447 0.360043 0.178092 0.89533
+-0.215583 0.346685 0.119154 0.905061
+-0.237796 0.331843 0.0597045 0.910916
+-0.25899 0.31558 3.10581e-08 0.912871
+-0.279075 0.297966 -0.0597047 0.910916
+-0.297966 0.279075 -0.119154 0.905061
+-0.31558 0.25899 -0.178092 0.89533
+-0.331843 0.237796 -0.236268 0.881766
+-0.346685 0.215583 -0.293433 0.864425
+-0.360043 0.192447 -0.34934 0.843383
+-0.371859 0.168487 -0.403752 0.818729
+-0.382082 0.143805 -0.456436 0.790569
+-0.390669 0.118508 -0.507164 0.759024
+-0.397584 0.0927035 -0.555721 0.724229
+-0.402795 0.0665018 -0.601898 0.686333
+-0.406282 0.0400153 -0.645497 0.645497
+-0.40803 0.0133575 -0.686333 0.601898
+0.456191 0.0149342 0.755583 -0.469852
+0.454238 0.0447385 0.784695 -0.419428
+0.450339 0.0743513 0.810447 -0.367209
+0.444512 0.103646 0.832728 -0.313417
+0.436782 0.132496 0.851444 -0.258283
+0.427181 0.160779 0.866513 -0.202043
+0.415751 0.188374 0.877872 -0.144937
+0.40254 0.215162 0.885472 -0.0872114
+0.387606 0.241029 0.88928 -0.029112
+0.371012 0.265863 0.88928 0.029112
+0.352829 0.28956 0.885472 0.0872114
+0.333136 0.312016 0.877872 0.144937
+0.312016 0.333136 0.866513 0.202043
+0.28956 0.352829 0.851444 0.258283
+0.265863 0.371012 0.832728 0.313417
+0.241029 0.387606 0.810447 0.367209
+0.215162 0.40254 0.784695 0.419428
+0.188374 0.415751 0.755583 0.469852
+0.160779 0.427181 0.723236 0.518263
+0.132496 0.436782 0.687791 0.564456
+0.103646 0.444512 0.649401 0.608231
+0.0743512 0.450339 0.608231 0.649401
+0.0447384 0.454238 0.564455 0.687791
+0.0149341 0.456191 0.518263 0.723236
+-0.0149342 0.456191 0.469852 0.755583
+-0.0447385 0.454238 0.419428 0.784695
+-0.0743513 0.450339 0.367209 0.810447
+-0.103646 0.444512 0.313417 0.832728
+-0.132496 0.436781 0.258283 0.851444
+-0.160779 0.427181 0.202042 0.866513
+-0.188374 0.415751 0.144937 0.877872
+-0.215162 0.40254 0.0872115 0.885472
+-0.241029 0.387606 0.029112 0.88928
+-0.265864 0.371012 -0.0291121 0.88928
+-0.28956 0.352829 -0.0872114 0.885472
+-0.312016 0.333136 -0.144937 0.877872
+-0.333136 0.312016 -0.202043 0.866513
+-0.352829 0.28956 -0.258283 0.851444
+-0.371012 0.265864 -0.313417 0.832728
+-0.387606 0.241029 -0.367209 0.810447
+-0.40254 0.215162 -0.419428 0.784695
+-0.415751 0.188374 -0.469852 0.755583
+-0.427181 0.160779 -0.518263 0.723236
+-0.436782 0.132496 -0.564456 0.687791
+-0.444512 0.103646 -0.608231 0.649401
+-0.450339 0.0743513 -0.649401 0.608231
+-0.454238 0.0447385 -0.687791 0.564456
+-0.456191 0.0149342 -0.723236 0.518263
+0.456191 0.0149342 0.832728 -0.313417
+0.454238 0.0447385 0.851444 -0.258283
+0.450339 0.0743513 0.866513 -0.202043
+0.444512 0.103646 0.877872 -0.144937
+0.436782 0.132496 0.885472 -0.0872114
+0.427181 0.160779 0.88928 -0.029112
+0.415751 0.188374 0.88928 0.029112
+0.40254 0.215162 0.885472 0.0872114
+0.387606 0.241029 0.877872 0.144937
+0.371012 0.265863 0.866513 0.202043
+0.352829 0.28956 0.851444 0.258283
+0.333136 0.312016 0.832728 0.313417
+0.312016 0.333136 0.810447 0.367209
+0.28956 0.352829 0.784695 0.419428
+0.265863 0.371012 0.755583 0.469852
+0.241029 0.387606 0.723236 0.518263
+0.215162 0.40254 0.687791 0.564456
+0.188374 0.415751 0.649401 0.608231
+0.160779 0.427181 0.608231 0.649401
+0.132496 0.436782 0.564456 0.687791
+0.103646 0.444512 0.518263 0.723236
+0.0743512 0.450339 0.469852 0.755583
+0.0447384 0.454238 0.419428 0.784695
+0.0149341 0.456191 0.367209 0.810447
+-0.0149342 0.456191 0.313417 0.832728
+-0.0447385 0.454238 0.258283 0.851444
+-0.0743513 0.450339 0.202043 0.866513
+-0.103646 0.444512 0.144937 0.877872
+-0.132496 0.436781 0.0872112 0.885472
+-0.160779 0.427181 0.0291119 0.88928
+-0.188374 0.415751 -0.0291121 0.88928
+-0.215162 0.40254 -0.0872113 0.885472
+-0.241029 0.387606 -0.144937 0.877872
+-0.265864 0.371012 -0.202043 0.866513
+-0.28956 0.352829 -0.258283 0.851444
+-0.312016 0.333136 -0.313417 0.832728
+-0.333136 0.312016 -0.367209 0.810447
+-0.352829 0.28956 -0.419428 0.784695
+-0.371012 0.265864 -0.469852 0.755583
+-0.387606 0.241029 -0.518263 0.723236
+-0.40254 0.215162 -0.564455 0.687791
+-0.415751 0.188374 -0.608231 0.649401
+-0.427181 0.160779 -0.649401 0.608231
+-0.436782 0.132496 -0.687791 0.564456
+-0.444512 0.103646 -0.723236 0.518263
+-0.450339 0.0743513 -0.755583 0.469852
+-0.454238 0.0447385 -0.784695 0.419428
+-0.456191 0.0149342 -0.810447 0.367209
+0.499732 0.0163595 0.836516 -0.224144
+0.497592 0.0490086 0.849385 -0.168953
+0.493322 0.0814477 0.858616 -0.113039
+0.486938 0.113538 0.864171 -0.0566408
+0.47847 0.145142 0.866025 6.45453e-10
+0.467953 0.176125 0.864171 0.0566408
+0.455432 0.206354 0.858616 0.113039
+0.440961 0.235698 0.849385 0.168953
+0.424601 0.264034 0.836516 0.224144
+0.406423 0.291239 0.820066 0.278375
+0.386505 0.317197 0.800103 0.331414
+0.364932 0.341796 0.776715 0.383033
+0.341796 0.364932 0.75 0.433013
+0.317197 0.386505 0.720074 0.481138
+0.291239 0.406423 0.687064 0.527203
+0.264034 0.424601 0.651112 0.57101
+0.235698 0.440961 0.612372 0.612372
+0.206353 0.455432 0.57101 0.651112
+0.176125 0.467953 0.527203 0.687064
+0.145142 0.47847 0.481138 0.720074
+0.113538 0.486938 0.433013 0.75
+0.0814477 0.493322 0.383033 0.776715
+0.0490085 0.497592 0.331413 0.800103
+0.0163595 0.499732 0.278375 0.820066
+-0.0163596 0.499732 0.224144 0.836516
+-0.0490086 0.497592 0.168953 0.849385
+-0.0814478 0.493322 0.113039 0.858616
+-0.113538 0.486938 0.0566407 0.864171
+-0.145142 0.47847 -1.41739e-07 0.866025
+-0.176125 0.467953 -0.0566409 0.864171
+-0.206354 0.455432 -0.113039 0.858616
+-0.235698 0.440961 -0.168953 0.849385
+-0.264034 0.424601 -0.224144 0.836516
+-0.291239 0.406423 -0.278375 0.820066
+-0.317197 0.386505 -0.331414 0.800103
+-0.341796 0.364932 -0.383033 0.776715
+-0.364932 0.341796 -0.433013 0.75
+-0.386505 0.317197 -0.481138 0.720074
+-0.406423 0.291239 -0.527203 0.687064
+-0.424601 0.264034 -0.57101 0.651112
+-0.440961 0.235698 -0.612372 0.612373
+-0.455432 0.206354 -0.651112 0.57101
+-0.467953 0.176125 -0.687064 0.527203
+-0.47847 0.145142 -0.720074 0.481138
+-0.486938 0.113538 -0.75 0.433013
+-0.493322 0.0814478 -0.776715 0.383033
+-0.497592 0.0490085 -0.800103 0.331414
+-0.499732 0.0163596 -0.820066 0.278375
+0.499732 0.0163595 0.776715 -0.383033
+0.497592 0.0490086 0.800103 -0.331414
+0.493322 0.0814477 0.820066 -0.278375
+0.486938 0.113538 0.836516 -0.224144
+0.47847 0.145142 0.849385 -0.168953
+0.467953 0.176125 0.858616 -0.113039
+0.455432 0.206354 0.864171 -0.0566408
+0.440961 0.235698 0.866025 1.82821e-08
+0.424601 0.264034 0.864171 0.0566408
+0.406423 0.291239 0.858616 0.113039
+0.386505 0.317197 0.849385 0.168953
+0.364932 0.341796 0.836516 0.224144
+0.341796 0.364932 0.820066 0.278375
+0.317197 0.386505 0.800103 0.331414
+0.291239 0.406423 0.776715 0.383033
+0.264034 0.424601 0.75 0.433013
+0.235698 0.440961 0.720074 0.481138
+0.206353 0.455432 0.687064 0.527203
+0.176125 0.467953 0.651112 0.57101
+0.145142 0.47847 0.612372 0.612372
+0.113538 0.486938 0.57101 0.651112
+0.0814477 0.493322 0.527203 0.687064
+0.0490085 0.497592 0.481138 0.720074
+0.0163595 0.499732 0.433013 0.75
+-0.0163596 0.499732 0.383033 0.776715
+-0.0490086 0.497592 0.331414 0.800103
+-0.0814478 0.493322 0.278375 0.820066
+-0.113538 0.486938 0.224144 0.836516
+-0.145142 0.47847 0.168953 0.849385
+-0.176125 0.467953 0.113039 0.858616
+-0.206354 0.455432 0.0566407 0.864171
+-0.235698 0.440961 9.87201e-08 0.866025
+-0.264034 0.424601 -0.0566408 0.864171
+-0.291239 0.406423 -0.113039 0.858616
+-0.317197 0.386505 -0.168953 0.849385
+-0.341796 0.364932 -0.224144 0.836516
+-0.364932 0.341796 -0.278375 0.820066
+-0.386505 0.317197 -0.331414 0.800103
+-0.406423 0.291239 -0.383033 0.776715
+-0.424601 0.264034 -0.433013 0.75
+-0.440961 0.235698 -0.481138 0.720074
+-0.455432 0.206354 -0.527203 0.687064
+-0.467953 0.176125 -0.57101 0.651112
+-0.47847 0.145142 -0.612372 0.612372
+-0.486938 0.113538 -0.651112 0.57101
+-0.493322 0.0814478 -0.687064 0.527203
+-0.497592 0.0490085 -0.720074 0.481138
+-0.499732 0.0163596 -0.75 0.433013
+0.539773 0.0176703 0.787682 -0.296463
+0.537461 0.0529353 0.805385 -0.244311
+0.532848 0.0879736 0.81964 -0.191113
+0.525954 0.122635 0.830384 -0.137097
+0.516807 0.156772 0.837573 -0.0824937
+0.505447 0.190237 0.841175 -0.0275372
+0.491923 0.222887 0.841175 0.0275372
+0.476292 0.254583 0.837573 0.0824937
+0.458622 0.285189 0.830384 0.137097
+0.438987 0.314574 0.81964 0.191113
+0.417473 0.342612 0.805385 0.244311
+0.394172 0.369182 0.787682 0.296463
+0.369182 0.394172 0.766606 0.347345
+0.342612 0.417473 0.742247 0.396739
+0.314574 0.438987 0.71471 0.444435
+0.285189 0.458622 0.684112 0.490228
+0.254583 0.476292 0.650585 0.533922
+0.222887 0.491923 0.614272 0.575329
+0.190237 0.505447 0.575329 0.614272
+0.156772 0.516807 0.533921 0.650585
+0.122635 0.525954 0.490228 0.684112
+0.0879735 0.532848 0.444435 0.71471
+0.0529352 0.537461 0.396739 0.742247
+0.0176703 0.539773 0.347345 0.766606
+-0.0176704 0.539773 0.296463 0.787682
+-0.0529354 0.537461 0.244311 0.805385
+-0.0879736 0.532848 0.191113 0.81964
+-0.122635 0.525954 0.137097 0.830384
+-0.156772 0.516807 0.0824936 0.837573
+-0.190237 0.505447 0.0275371 0.841175
+-0.222887 0.491923 -0.0275373 0.841175
+-0.254583 0.476292 -0.0824936 0.837573
+-0.285189 0.458622 -0.137097 0.830384
+-0.314574 0.438987 -0.191113 0.81964
+-0.342612 0.417473 -0.244311 0.805385
+-0.369182 0.394172 -0.296463 0.787682
+-0.394172 0.369182 -0.347345 0.766606
+-0.417473 0.342612 -0.39674 0.742247
+-0.438987 0.314574 -0.444435 0.71471
+-0.458622 0.285189 -0.490228 0.684112
+-0.476292 0.254583 -0.533921 0.650585
+-0.491923 0.222887 -0.575329 0.614272
+-0.505447 0.190237 -0.614272 0.575329
+-0.516807 0.156772 -0.650585 0.533922
+-0.525954 0.122635 -0.684112 0.490228
+-0.532848 0.0879736 -0.71471 0.444435
+-0.537461 0.0529353 -0.742247 0.396739
+-0.539773 0.0176704 -0.766606 0.347345
+0.255019 0.00834844 0.182422 -0.949536
+0.253927 0.0250096 0.244134 -0.935572
+0.251747 0.0415636 0.304801 -0.917601
+0.24849 0.0579397 0.364162 -0.895702
+0.244168 0.0740676 0.421964 -0.869967
+0.238801 0.0898784 0.477959 -0.840506
+0.232412 0.105304 0.531908 -0.807447
+0.225027 0.120279 0.583578 -0.770929
+0.216678 0.134739 0.63275 -0.731111
+0.207402 0.148622 0.679212 -0.688162
+0.197238 0.161869 0.722766 -0.642266
+0.186229 0.174422 0.763225 -0.593619
+0.174422 0.186229 0.800415 -0.542431
+0.161869 0.197238 0.834178 -0.48892
+0.148622 0.207402 0.864369 -0.433315
+0.134739 0.216678 0.890858 -0.375855
+0.120279 0.225027 0.913533 -0.316786
+0.105304 0.232412 0.932296 -0.256359
+0.0898784 0.238801 0.947067 -0.194835
+0.0740676 0.244168 0.957782 -0.132477
+0.0579397 0.24849 0.964395 -0.0695517
+0.0415636 0.251747 0.966879 -0.00632817
+0.0250096 0.253927 0.965223 0.0569223
+0.00834842 0.255019 0.959434 0.119929
+-0.00834847 0.255019 0.949536 0.182422
+-0.0250096 0.253927 0.935572 0.244134
+-0.0415636 0.251747 0.917601 0.304801
+-0.0579397 0.24849 0.895702 0.364162
+-0.0740677 0.244168 0.869967 0.421964
+-0.0898785 0.238801 0.840506 0.477959
+-0.105304 0.232412 0.807447 0.531908
+-0.120279 0.225027 0.770929 0.583578
+-0.134739 0.216678 0.731111 0.63275
+-0.148622 0.207402 0.688162 0.679212
+-0.161869 0.197238 0.642266 0.722766
+-0.174422 0.186229 0.593619 0.763225
+-0.186229 0.174422 0.542431 0.800415
+-0.197238 0.161869 0.48892 0.834178
+-0.207402 0.148622 0.433316 0.864369
+-0.216678 0.134739 0.375855 0.890858
+-0.225027 0.120279 0.316786 0.913533
+-0.232412 0.105304 0.256359 0.932296
+-0.238801 0.0898784 0.194835 0.947067
+-0.244168 0.0740676 0.132477 0.957782
+-0.24849 0.0579397 0.0695516 0.964395
+-0.251747 0.0415636 0.00632836 0.966879
+-0.253927 0.0250096 -0.0569222 0.965223
+-0.255019 0.00834847 -0.119929 0.959434
+0.306022 0.0100181 0.392885 -0.867117
+0.304712 0.0300115 0.448756 -0.839564
+0.302097 0.0498763 0.502706 -0.808416
+0.298188 0.0695276 0.554502 -0.773807
+0.293002 0.0888812 0.603924 -0.735884
+0.286561 0.107854 0.65076 -0.69481
+0.278894 0.126365 0.69481 -0.65076
+0.270032 0.144335 0.735884 -0.603924
+0.260014 0.161687 0.773807 -0.554502
+0.248882 0.178347 0.808416 -0.502706
+0.236685 0.194242 0.839564 -0.448756
+0.223474 0.209307 0.867117 -0.392885
+0.209307 0.223474 0.890956 -0.335332
+0.194242 0.236685 0.91098 -0.276343
+0.178347 0.248882 0.927103 -0.21617
+0.161687 0.260014 0.939256 -0.155072
+0.144335 0.270032 0.947388 -0.0933095
+0.126365 0.278894 0.951462 -0.0311476
+0.107854 0.286562 0.951462 0.0311477
+0.0888812 0.293002 0.947388 0.0933096
+0.0695276 0.298188 0.939256 0.155072
+0.0498763 0.302097 0.927103 0.21617
+0.0300115 0.304712 0.91098 0.276343
+0.0100181 0.306022 0.890956 0.335332
+-0.0100182 0.306022 0.867116 0.392885
+-0.0300115 0.304712 0.839564 0.448756
+-0.0498764 0.302097 0.808416 0.502706
+-0.0695276 0.298188 0.773807 0.554502
+-0.0888812 0.293002 0.735884 0.603925
+-0.107854 0.286561 0.69481 0.650761
+-0.126365 0.278894 0.65076 0.69481
+-0.144335 0.270032 0.603924 0.735884
+-0.161687 0.260014 0.554502 0.773807
+-0.178347 0.248882 0.502706 0.808416
+-0.194242 0.236685 0.448756 0.839564
+-0.209307 0.223474 0.392885 0.867117
+-0.223474 0.209307 0.335332 0.890956
+-0.236685 0.194242 0.276343 0.91098
+-0.248882 0.178347 0.21617 0.927103
+-0.260014 0.161687 0.155072 0.939256
+-0.270032 0.144335 0.0933096 0.947388
+-0.278894 0.126365 0.0311476 0.951462
+-0.286562 0.107854 -0.0311477 0.951462
+-0.293002 0.0888812 -0.0933095 0.947388
+-0.298188 0.0695276 -0.155072 0.939256
+-0.302097 0.0498764 -0.21617 0.927103
+-0.304712 0.0300115 -0.276343 0.91098
+-0.306022 0.0100182 -0.335332 0.890956
+0.306022 0.0100181 0.155072 -0.939256
+0.304712 0.0300115 0.21617 -0.927103
+0.302097 0.0498763 0.276343 -0.91098
+0.298188 0.0695276 0.335332 -0.890956
+0.293002 0.0888812 0.392885 -0.867117
+0.286561 0.107854 0.448756 -0.839564
+0.278894 0.126365 0.502706 -0.808416
+0.270032 0.144335 0.554502 -0.773807
+0.260014 0.161687 0.603924 -0.735884
+0.248882 0.178347 0.65076 -0.69481
+0.236685 0.194242 0.69481 -0.65076
+0.223474 0.209307 0.735884 -0.603924
+0.209307 0.223474 0.773807 -0.554502
+0.194242 0.236685 0.808416 -0.502706
+0.178347 0.248882 0.839564 -0.448756
+0.161687 0.260014 0.867117 -0.392885
+0.144335 0.270032 0.890956 -0.335332
+0.126365 0.278894 0.91098 -0.276343
+0.107854 0.286562 0.927103 -0.21617
+0.0888812 0.293002 0.939256 -0.155072
+0.0695276 0.298188 0.947388 -0.0933095
+0.0498763 0.302097 0.951462 -0.0311475
+0.0300115 0.304712 0.951462 0.0311478
+0.0100181 0.306022 0.947388 0.0933096
+-0.0100182 0.306022 0.939256 0.155072
+-0.0300115 0.304712 0.927103 0.21617
+-0.0498764 0.302097 0.91098 0.276343
+-0.0695276 0.298188 0.890956 0.335332
+-0.0888812 0.293002 0.867116 0.392886
+-0.107854 0.286561 0.839564 0.448756
+-0.126365 0.278894 0.808416 0.502706
+-0.144335 0.270032 0.773807 0.554502
+-0.161687 0.260014 0.735884 0.603924
+-0.178347 0.248882 0.69481 0.650761
+-0.194242 0.236685 0.650761 0.69481
+-0.209307 0.223474 0.603924 0.735884
+-0.223474 0.209307 0.554502 0.773807
+-0.236685 0.194242 0.502706 0.808416
+-0.248882 0.178347 0.448756 0.839564
+-0.260014 0.161687 0.392885 0.867117
+-0.270032 0.144335 0.335332 0.890956
+-0.278894 0.126365 0.276343 0.91098
+-0.286562 0.107854 0.21617 0.927103
+-0.293002 0.0888812 0.155072 0.939256
+-0.298188 0.0695276 0.0933095 0.947388
+-0.302097 0.0498764 0.0311477 0.951462
+-0.304712 0.0300115 -0.0311477 0.951462
+-0.306022 0.0100182 -0.0933095 0.947388
+0.357026 0.0116878 0.337168 -0.871042
+0.355497 0.0350134 0.393415 -0.847125
+0.352446 0.0581891 0.447977 -0.819581
+0.347886 0.0811156 0.500621 -0.788527
+0.341836 0.103695 0.551121 -0.754096
+0.334322 0.12583 0.599262 -0.716437
+0.325376 0.147426 0.644836 -0.675709
+0.315037 0.168391 0.687649 -0.632088
+0.30335 0.188635 0.727517 -0.58576
+0.290363 0.208071 0.76427 -0.536924
+0.276133 0.226616 0.79775 -0.485789
+0.26072 0.244191 0.827814 -0.432574
+0.244191 0.26072 0.854333 -0.377506
+0.226616 0.276133 0.877194 -0.320821
+0.208071 0.290363 0.896299 -0.262763
+0.188635 0.30335 0.911565 -0.20358
+0.168391 0.315038 0.922928 -0.143525
+0.147426 0.325376 0.930339 -0.0828551
+0.12583 0.334322 0.933766 -0.0218307
+0.103695 0.341836 0.933195 0.0392873
+0.0811155 0.347886 0.928627 0.100237
+0.058189 0.352446 0.920083 0.160758
+0.0350134 0.355497 0.907599 0.22059
+0.0116878 0.357026 0.891229 0.279477
+-0.0116879 0.357026 0.871042 0.337168
+-0.0350134 0.355497 0.847125 0.393415
+-0.0581891 0.352446 0.819581 0.447977
+-0.0811156 0.347886 0.788527 0.500621
+-0.103695 0.341836 0.754096 0.551121
+-0.12583 0.334322 0.716436 0.599262
+-0.147426 0.325376 0.675709 0.644836
+-0.168391 0.315038 0.632088 0.687649
+-0.188635 0.30335 0.58576 0.727517
+-0.208071 0.290363 0.536924 0.76427
+-0.226616 0.276133 0.485789 0.79775
+-0.244191 0.26072 0.432574 0.827814
+-0.26072 0.244191 0.377506 0.854333
+-0.276133 0.226616 0.320821 0.877194
+-0.290363 0.208071 0.262763 0.896299
+-0.30335 0.188635 0.20358 0.911565
+-0.315037 0.168391 0.143525 0.922928
+-0.325376 0.147426 0.0828552 0.930339
+-0.334322 0.12583 0.0218306 0.933766
+-0.341836 0.103695 -0.0392872 0.933195
+-0.347886 0.0811155 -0.100237 0.928627
+-0.352446 0.0581891 -0.160757 0.920083
+-0.355497 0.0350134 -0.22059 0.907599
+-0.357026 0.0116879 -0.279477 0.891229
+0.357026 0.0116878 0.522539 -0.774176
+0.355497 0.0350134 0.572054 -0.738343
+0.352446 0.0581891 0.619119 -0.699348
+0.347886 0.0811156 0.663533 -0.657358
+0.341836 0.103695 0.705106 -0.612553
+0.334322 0.12583 0.743659 -0.565126
+0.325376 0.147426 0.779028 -0.515278
+0.315037 0.168391 0.811061 -0.463224
+0.30335 0.188635 0.83962 -0.409186
+0.290363 0.208071 0.864585 -0.353396
+0.276133 0.226616 0.885847 -0.296093
+0.26072 0.244191 0.903316 -0.237522
+0.244191 0.26072 0.916916 -0.177934
+0.226616 0.276133 0.92659 -0.117584
+0.208071 0.290363 0.932297 -0.05673
+0.188635 0.30335 0.934011 0.00436661
+0.168391 0.315038 0.931726 0.0654445
+0.147426 0.325376 0.925451 0.126242
+0.12583 0.334322 0.915212 0.186499
+0.103695 0.341836 0.901055 0.245958
+0.0811155 0.347886 0.88304 0.304363
+0.058189 0.352446 0.861243 0.361465
+0.0350134 0.355497 0.835758 0.417019
+0.0116878 0.357026 0.806694 0.470787
+-0.0116879 0.357026 0.774176 0.522539
+-0.0350134 0.355497 0.738343 0.572054
+-0.0581891 0.352446 0.699348 0.619119
+-0.0811156 0.347886 0.657358 0.663533
+-0.103695 0.341836 0.612553 0.705106
+-0.12583 0.334322 0.565126 0.743659
+-0.147426 0.325376 0.515278 0.779028
+-0.168391 0.315038 0.463224 0.811061
+-0.188635 0.30335 0.409186 0.83962
+-0.208071 0.290363 0.353396 0.864585
+-0.226616 0.276133 0.296093 0.885847
+-0.244191 0.26072 0.237522 0.903316
+-0.26072 0.244191 0.177934 0.916916
+-0.276133 0.226616 0.117584 0.92659
+-0.290363 0.208071 0.0567302 0.932297
+-0.30335 0.188635 -0.00436654 0.934011
+-0.315037 0.168391 -0.0654443 0.931726
+-0.325376 0.147426 -0.126242 0.925451
+-0.334322 0.12583 -0.186499 0.915212
+-0.341836 0.103695 -0.245958 0.901055
+-0.347886 0.0811155 -0.304363 0.88304
+-0.352446 0.0581891 -0.361465 0.861243
+-0.355497 0.0350134 -0.417019 0.835758
+-0.357026 0.0116879 -0.470787 0.806694
+0.40803 0.0133575 0.601898 -0.686333
+0.406282 0.0400153 0.645497 -0.645497
+0.402795 0.0665018 0.686333 -0.601898
+0.397584 0.0927035 0.724229 -0.555721
+0.390669 0.118508 0.759024 -0.507164
+0.382082 0.143805 0.790569 -0.456435
+0.371859 0.168487 0.818729 -0.403752
+0.360043 0.192447 0.843383 -0.349341
+0.346685 0.215583 0.864425 -0.293433
+0.331843 0.237796 0.881766 -0.236268
+0.31558 0.25899 0.89533 -0.178092
+0.297966 0.279075 0.905061 -0.119154
+0.279075 0.297966 0.910916 -0.0597046
+0.25899 0.31558 0.912871 2.0411e-09
+0.237796 0.331843 0.910916 0.0597047
+0.215583 0.346685 0.905061 0.119154
+0.192447 0.360043 0.89533 0.178092
+0.168487 0.371859 0.881766 0.236268
+0.143805 0.382082 0.864425 0.293433
+0.118508 0.390669 0.843383 0.349341
+0.0927035 0.397584 0.818729 0.403753
+0.0665017 0.402795 0.790569 0.456436
+0.0400153 0.406282 0.759024 0.507164
+0.0133575 0.40803 0.724229 0.555721
+-0.0133575 0.40803 0.686333 0.601898
+-0.0400154 0.406282 0.645497 0.645497
+-0.0665018 0.402795 0.601898 0.686333
+-0.0927035 0.397584 0.555721 0.724229
+-0.118508 0.390669 0.507164 0.759025
+-0.143806 0.382082 0.456435 0.790569
+-0.168487 0.371859 0.403752 0.818729
+-0.192447 0.360043 0.349341 0.843383
+-0.215583 0.346685 0.293433 0.864425
+-0.237796 0.331843 0.236268 0.881766
+-0.25899 0.31558 0.178092 0.89533
+-0.279075 0.297966 0.119154 0.905061
+-0.297966 0.279075 0.0597047 0.910916
+-0.31558 0.25899 -4.1944e-08 0.912871
+-0.331843 0.237796 -0.0597045 0.910916
+-0.346685 0.215583 -0.119154 0.905061
+-0.360043 0.192447 -0.178092 0.89533
+-0.371859 0.168487 -0.236268 0.881766
+-0.382082 0.143805 -0.293433 0.864425
+-0.390669 0.118508 -0.349341 0.843383
+-0.397584 0.0927035 -0.403753 0.818729
+-0.402795 0.0665018 -0.456435 0.790569
+-0.406282 0.0400153 -0.507164 0.759024
+-0.40803 0.0133575 -0.555721 0.724229
+0.40803 0.0133575 0.456435 -0.790569
+0.406282 0.0400153 0.507164 -0.759024
+0.402795 0.0665018 0.555721 -0.724229
+0.397584 0.0927035 0.601898 -0.686333
+0.390669 0.118508 0.645497 -0.645497
+0.382082 0.143805 0.686333 -0.601898
+0.371859 0.168487 0.724229 -0.555721
+0.360043 0.192447 0.759024 -0.507164
+0.346685 0.215583 0.790569 -0.456435
+0.331843 0.237796 0.818729 -0.403752
+0.31558 0.25899 0.843383 -0.349341
+0.297966 0.279075 0.864425 -0.293433
+0.279075 0.297966 0.881766 -0.236268
+0.25899 0.31558 0.89533 -0.178092
+0.237796 0.331843 0.905061 -0.119154
+0.215583 0.346685 0.910916 -0.0597045
+0.192447 0.360043 0.912871 7.50431e-08
+0.168487 0.371859 0.910916 0.0597047
+0.143805 0.382082 0.905061 0.119154
+0.118508 0.390669 0.89533 0.178092
+0.0927035 0.397584 0.881766 0.236268
+0.0665017 0.402795 0.864425 0.293433
+0.0400153 0.406282 0.843383 0.349341
+0.0133575 0.40803 0.818729 0.403753
+-0.0133575 0.40803 0.790569 0.456436
+-0.0400154 0.406282 0.759024 0.507164
+-0.0665018 0.402795 0.724229 0.555721
+-0.0927035 0.397584 0.686333 0.601898
+-0.118508 0.390669 0.645497 0.645497
+-0.143806 0.382082 0.601898 0.686333
+-0.168487 0.371859 0.555721 0.724229
+-0.192447 0.360043 0.507164 0.759024
+-0.215583 0.346685 0.456435 0.790569
+-0.237796 0.331843 0.403752 0.818729
+-0.25899 0.31558 0.349341 0.843383
+-0.279075 0.297966 0.293433 0.864425
+-0.297966 0.279075 0.236268 0.881766
+-0.31558 0.25899 0.178092 0.89533
+-0.331843 0.237796 0.119154 0.905061
+-0.346685 0.215583 0.0597046 0.910916
+-0.360043 0.192447 1.02699e-07 0.912871
+-0.371859 0.168487 -0.0597046 0.910916
+-0.382082 0.143805 -0.119154 0.905061
+-0.390669 0.118508 -0.178092 0.89533
+-0.397584 0.0927035 -0.236268 0.881766
+-0.402795 0.0665018 -0.293433 0.864425
+-0.406282 0.0400153 -0.349341 0.843383
+-0.40803 0.0133575 -0.403752 0.818729
+0.456191 0.0149342 0.518263 -0.723236
+0.454238 0.0447385 0.564456 -0.687791
+0.450339 0.0743513 0.608231 -0.649401
+0.444512 0.103646 0.649401 -0.608231
+0.436782 0.132496 0.687791 -0.564456
+0.427181 0.160779 0.723236 -0.518263
+0.415751 0.188374 0.755583 -0.469852
+0.40254 0.215162 0.784695 -0.419428
+0.387606 0.241029 0.810447 -0.367209
+0.371012 0.265863 0.832728 -0.313417
+0.352829 0.28956 0.851444 -0.258283
+0.333136 0.312016 0.866513 -0.202043
+0.312016 0.333136 0.877872 -0.144937
+0.28956 0.352829 0.885472 -0.0872114
+0.265863 0.371012 0.88928 -0.029112
+0.241029 0.387606 0.88928 0.0291121
+0.215162 0.40254 0.885472 0.0872115
+0.188374 0.415751 0.877872 0.144937
+0.160779 0.427181 0.866513 0.202043
+0.132496 0.436782 0.851444 0.258283
+0.103646 0.444512 0.832728 0.313417
+0.0743512 0.450339 0.810447 0.367209
+0.0447384 0.454238 0.784695 0.419428
+0.0149341 0.456191 0.755583 0.469852
+-0.0149342 0.456191 0.723236 0.518263
+-0.0447385 0.454238 0.687791 0.564456
+-0.0743513 0.450339 0.649401 0.608231
+-0.103646 0.444512 0.608231 0.649401
+-0.132496 0.436781 0.564455 0.687791
+-0.160779 0.427181 0.518263 0.723236
+-0.188374 0.415751 0.469852 0.755583
+-0.215162 0.40254 0.419428 0.784695
+-0.241029 0.387606 0.367209 0.810447
+-0.265864 0.371012 0.313417 0.832728
+-0.28956 0.352829 0.258283 0.851444
+-0.312016 0.333136 0.202043 0.866513
+-0.333136 0.312016 0.144937 0.877872
+-0.352829 0.28956 0.0872113 0.885472
+-0.371012 0.265864 0.0291121 0.88928
+-0.387606 0.241029 -0.029112 0.88928
+-0.40254 0.215162 -0.0872113 0.885472
+-0.415751 0.188374 -0.144937 0.877872
+-0.427181 0.160779 -0.202043 0.866513
+-0.436782 0.132496 -0.258283 0.851444
+-0.444512 0.103646 -0.313417 0.832728
+-0.450339 0.0743513 -0.367209 0.810447
+-0.454238 0.0447385 -0.419428 0.784695
+-0.456191 0.0149342 -0.469852 0.755583
+0.357026 0.0116878 0.134889 -0.92423
+0.355497 0.0350134 0.195048 -0.913429
+0.352446 0.0581891 0.254372 -0.898716
+0.347886 0.0811156 0.312606 -0.880155
+0.341836 0.103695 0.369501 -0.857825
+0.334322 0.12583 0.424815 -0.831822
+0.325376 0.147426 0.478309 -0.802257
+0.315037 0.168391 0.529755 -0.769256
+0.30335 0.188635 0.578933 -0.732962
+0.290363 0.208071 0.625631 -0.693528
+0.276133 0.226616 0.66965 -0.651125
+0.26072 0.244191 0.710802 -0.605934
+0.244191 0.26072 0.74891 -0.558148
+0.226616 0.276133 0.783811 -0.507972
+0.208071 0.290363 0.815356 -0.45562
+0.188635 0.30335 0.84341 -0.401318
+0.168391 0.315038 0.867851 -0.345297
+0.147426 0.325376 0.888577 -0.287798
+0.12583 0.334322 0.905497 -0.229066
+0.103695 0.341836 0.91854 -0.169353
+0.0811155 0.347886 0.927649 -0.108915
+0.058189 0.352446 0.932787 -0.0480106
+0.0350134 0.355497 0.933929 0.0130993
+0.0116878 0.357026 0.931073 0.0741532
+-0.0116879 0.357026 0.92423 0.134889
+-0.0350134 0.355497 0.913429 0.195048
+-0.0581891 0.352446 0.898716 0.254372
+-0.0811156 0.347886 0.880155 0.312606
+-0.103695 0.341836 0.857825 0.369502
+-0.12583 0.334322 0.831822 0.424815
+-0.147426 0.325376 0.802257 0.478309
+-0.168391 0.315038 0.769257 0.529755
+-0.188635 0.30335 0.732962 0.578933
+-0.208071 0.290363 0.693528 0.625631
+-0.226616 0.276133 0.651125 0.66965
+-0.244191 0.26072 0.605934 0.710802
+-0.26072 0.244191 0.558148 0.74891
+-0.276133 0.226616 0.507972 0.783812
+-0.290363 0.208071 0.455621 0.815356
+-0.30335 0.188635 0.401318 0.84341
+-0.315037 0.168391 0.345297 0.867851
+-0.325376 0.147426 0.287798 0.888577
+-0.334322 0.12583 0.229066 0.905497
+-0.341836 0.103695 0.169353 0.91854
+-0.347886 0.0811155 0.108915 0.927649
+-0.352446 0.0581891 0.0480108 0.932787
+-0.355497 0.0350134 -0.0130992 0.933929
+-0.357026 0.0116879 -0.074153 0.931073
+0.40803 0.0133575 0.293433 -0.864425
+0.406282 0.0400153 0.349341 -0.843383
+0.402795 0.0665018 0.403752 -0.818729
+0.397584 0.0927035 0.456435 -0.790569
+0.390669 0.118508 0.507164 -0.759024
+0.382082 0.143805 0.555721 -0.724229
+0.371859 0.168487 0.601898 -0.686333
+0.360043 0.192447 0.645497 -0.645497
+0.346685 0.215583 0.686333 -0.601898
+0.331843 0.237796 0.724229 -0.555721
+0.31558 0.25899 0.759024 -0.507164
+0.297966 0.279075 0.790569 -0.456435
+0.279075 0.297966 0.818729 -0.403752
+0.25899 0.31558 0.843383 -0.349341
+0.237796 0.331843 0.864425 -0.293433
+0.215583 0.346685 0.881766 -0.236268
+0.192447 0.360043 0.89533 -0.178092
+0.168487 0.371859 0.905061 -0.119154
+0.143805 0.382082 0.910916 -0.0597046
+0.118508 0.390669 0.912871 3.92225e-08
+0.0927035 0.397584 0.910916 0.0597046
+0.0665017 0.402795 0.905061 0.119154
+0.0400153 0.406282 0.89533 0.178092
+0.0133575 0.40803 0.881766 0.236268
+-0.0133575 0.40803 0.864425 0.293433
+-0.0400154 0.406282 0.843383 0.349341
+-0.0665018 0.402795 0.818729 0.403753
+-0.0927035 0.397584 0.790569 0.456436
+-0.118508 0.390669 0.759024 0.507164
+-0.143806 0.382082 0.724229 0.555721
+-0.168487 0.371859 0.686333 0.601898
+-0.192447 0.360043 0.645497 0.645497
+-0.215583 0.346685 0.601898 0.686333
+-0.237796 0.331843 0.555721 0.724229
+-0.25899 0.31558 0.507164 0.759024
+-0.279075 0.297966 0.456435 0.790569
+-0.297966 0.279075 0.403753 0.818729
+-0.31558 0.25899 0.349341 0.843383
+-0.331843 0.237796 0.293433 0.864425
+-0.346685 0.215583 0.236268 0.881766
+-0.360043 0.192447 0.178092 0.89533
+-0.371859 0.168487 0.119154 0.905061
+-0.382082 0.143805 0.0597045 0.910916
+-0.390669 0.118508 2.96973e-08 0.912871
+-0.397584 0.0927035 -0.0597047 0.910916
+-0.402795 0.0665018 -0.119154 0.905061
+-0.406282 0.0400153 -0.178092 0.89533
+-0.40803 0.0133575 -0.236268 0.881766
+0.40803 0.0133575 0.119154 -0.905061
+0.406282 0.0400153 0.178092 -0.89533
+0.402795 0.0665018 0.236268 -0.881766
+0.397584 0.0927035 0.293433 -0.864425
+0.390669 0.118508 0.349341 -0.843383
+0.382082 0.143805 0.403752 -0.818729
+0.371859 0.168487 0.456435 -0.790569
+0.360043 0.192447 0.507164 -0.759024
+0.346685 0.215583 0.555721 -0.724229
+0.331843 0.237796 0.601898 -0.686333
+0.31558 0.25899 0.645497 -0.645497
+0.297966 0.279075 0.686333 -0.601898
+0.279075 0.297966 0.724229 -0.555721
+0.25899 0.31558 0.759024 -0.507164
+0.237796 0.331843 0.790569 -0.456435
+0.215583 0.346685 0.818729 -0.403752
+0.192447 0.360043 0.843383 -0.349341
+0.168487 0.371859 0.864425 -0.293433
+0.143805 0.382082 0.881766 -0.236268
+0.118508 0.390669 0.89533 -0.178092
+0.0927035 0.397584 0.905061 -0.119154
+0.0665017 0.402795 0.910916 -0.0597045
+0.0400153 0.406282 0.912871 1.12225e-07
+0.0133575 0.40803 0.910916 0.0597047
+-0.0133575 0.40803 0.905061 0.119154
+-0.0400154 0.406282 0.89533 0.178092
+-0.0665018 0.402795 0.881766 0.236268
+-0.0927035 0.397584 0.864425 0.293433
+-0.118508 0.390669 0.843383 0.349341
+-0.143806 0.382082 0.818729 0.403753
+-0.168487 0.371859 0.790569 0.456436
+-0.192447 0.360043 0.759024 0.507164
+-0.215583 0.346685 0.724229 0.555721
+-0.237796 0.331843 0.686333 0.601898
+-0.25899 0.31558 0.645497 0.645497
+-0.279075 0.297966 0.601898 0.686333
+-0.297966 0.279075 0.555721 0.724229
+-0.31558 0.25899 0.507164 0.759024
+-0.331843 0.237796 0.456436 0.790569
+-0.346685 0.215583 0.403752 0.818729
+-0.360043 0.192447 0.349341 0.843383
+-0.371859 0.168487 0.293433 0.864425
+-0.382082 0.143805 0.236268 0.881766
+-0.390669 0.118508 0.178092 0.89533
+-0.397584 0.0927035 0.119153 0.905061
+-0.402795 0.0665018 0.0597047 0.910916
+-0.406282 0.0400153 -4.33047e-08 0.912871
+-0.40803 0.0133575 -0.0597045 0.910916
+0.456191 0.0149342 0.202043 -0.866513
+0.454238 0.0447385 0.258283 -0.851444
+0.450339 0.0743513 0.313417 -0.832728
+0.444512 0.103646 0.367209 -0.810447
+0.436782 0.132496 0.419428 -0.784695
+0.427181 0.160779 0.469852 -0.755583
+0.415751 0.188374 0.518263 -0.723236
+0.40254 0.215162 0.564456 -0.687791
+0.387606 0.241029 0.608231 -0.649401
+0.371012 0.265863 0.649401 -0.608231
+0.352829 0.28956 0.687791 -0.564456
+0.333136 0.312016 0.723236 -0.518263
+0.312016 0.333136 0.755583 -0.469852
+0.28956 0.352829 0.784695 -0.419428
+0.265863 0.371012 0.810447 -0.367209
+0.241029 0.387606 0.832728 -0.313417
+0.215162 0.40254 0.851444 -0.258283
+0.188374 0.415751 0.866513 -0.202043
+0.160779 0.427181 0.877872 -0.144937
+0.132496 0.436782 0.885472 -0.0872114
+0.103646 0.444512 0.88928 -0.029112
+0.0743512 0.450339 0.88928 0.0291121
+0.0447384 0.454238 0.885472 0.0872115
+0.0149341 0.456191 0.877872 0.144937
+-0.0149342 0.456191 0.866513 0.202043
+-0.0447385 0.454238 0.851444 0.258283
+-0.0743513 0.450339 0.832728 0.313417
+-0.103646 0.444512 0.810447 0.367209
+-0.132496 0.436781 0.784695 0.419428
+-0.160779 0.427181 0.755583 0.469852
+-0.188374 0.415751 0.723236 0.518263
+-0.215162 0.40254 0.687791 0.564455
+-0.241029 0.387606 0.649401 0.608231
+-0.265864 0.371012 0.608231 0.649401
+-0.28956 0.352829 0.564456 0.687791
+-0.312016 0.333136 0.518263 0.723236
+-0.333136 0.312016 0.469852 0.755583
+-0.352829 0.28956 0.419428 0.784695
+-0.371012 0.265864 0.367209 0.810447
+-0.387606 0.241029 0.313417 0.832728
+-0.40254 0.215162 0.258283 0.851444
+-0.415751 0.188374 0.202043 0.866513
+-0.427181 0.160779 0.144937 0.877872
+-0.436782 0.132496 0.0872114 0.885472
+-0.444512 0.103646 0.029112 0.88928
+-0.450339 0.0743513 -0.029112 0.88928
+-0.454238 0.0447385 -0.0872114 0.885472
+-0.456191 0.0149342 -0.144937 0.877872
+0.456191 0.0149342 0.367209 -0.810447
+0.454238 0.0447385 0.419428 -0.784695
+0.450339 0.0743513 0.469852 -0.755583
+0.444512 0.103646 0.518263 -0.723236
+0.436782 0.132496 0.564456 -0.687791
+0.427181 0.160779 0.608231 -0.649401
+0.415751 0.188374 0.649401 -0.608231
+0.40254 0.215162 0.687791 -0.564456
+0.387606 0.241029 0.723236 -0.518263
+0.371012 0.265863 0.755583 -0.469852
+0.352829 0.28956 0.784695 -0.419428
+0.333136 0.312016 0.810447 -0.367209
+0.312016 0.333136 0.832728 -0.313417
+0.28956 0.352829 0.851444 -0.258283
+0.265863 0.371012 0.866513 -0.202043
+0.241029 0.387606 0.877872 -0.144937
+0.215162 0.40254 0.885472 -0.0872113
+0.188374 0.415751 0.88928 -0.029112
+0.160779 0.427181 0.88928 0.0291121
+0.132496 0.436782 0.885472 0.0872114
+0.103646 0.444512 0.877872 0.144937
+0.0743512 0.450339 0.866513 0.202043
+0.0447384 0.454238 0.851444 0.258283
+0.0149341 0.456191 0.832728 0.313417
+-0.0149342 0.456191 0.810447 0.367209
+-0.0447385 0.454238 0.784695 0.419428
+-0.0743513 0.450339 0.755583 0.469852
+-0.103646 0.444512 0.723236 0.518263
+-0.132496 0.436781 0.687791 0.564456
+-0.160779 0.427181 0.649401 0.608231
+-0.188374 0.415751 0.608231 0.649401
+-0.215162 0.40254 0.564456 0.687791
+-0.241029 0.387606 0.518263 0.723236
+-0.265864 0.371012 0.469852 0.755583
+-0.28956 0.352829 0.419428 0.784695
+-0.312016 0.333136 0.367209 0.810447
+-0.333136 0.312016 0.313417 0.832728
+-0.352829 0.28956 0.258283 0.851444
+-0.371012 0.265864 0.202043 0.866513
+-0.387606 0.241029 0.144937 0.877872
+-0.40254 0.215162 0.0872115 0.885472
+-0.415751 0.188374 0.029112 0.88928
+-0.427181 0.160779 -0.0291121 0.88928
+-0.436782 0.132496 -0.0872114 0.885472
+-0.444512 0.103646 -0.144937 0.877872
+-0.450339 0.0743513 -0.202043 0.866513
+-0.454238 0.0447385 -0.258283 0.851444
+-0.456191 0.0149342 -0.313417 0.832728
+0.499732 0.0163595 0.433013 -0.75
+0.497592 0.0490086 0.481138 -0.720074
+0.493322 0.0814477 0.527203 -0.687064
+0.486938 0.113538 0.57101 -0.651112
+0.47847 0.145142 0.612372 -0.612372
+0.467953 0.176125 0.651112 -0.57101
+0.455432 0.206354 0.687064 -0.527203
+0.440961 0.235698 0.720074 -0.481138
+0.424601 0.264034 0.75 -0.433013
+0.406423 0.291239 0.776715 -0.383033
+0.386505 0.317197 0.800103 -0.331414
+0.364932 0.341796 0.820066 -0.278375
+0.341796 0.364932 0.836516 -0.224144
+0.317197 0.386505 0.849385 -0.168953
+0.291239 0.406423 0.858616 -0.113039
+0.264034 0.424601 0.864171 -0.0566407
+0.235698 0.440961 0.866025 7.11922e-08
+0.206353 0.455432 0.864171 0.0566408
+0.176125 0.467953 0.858616 0.113039
+0.145142 0.47847 0.849385 0.168953
+0.113538 0.486938 0.836516 0.224144
+0.0814477 0.493322 0.820066 0.278375
+0.0490085 0.497592 0.800103 0.331414
+0.0163595 0.499732 0.776715 0.383033
+-0.0163596 0.499732 0.75 0.433013
+-0.0490086 0.497592 0.720074 0.481138
+-0.0814478 0.493322 0.687064 0.527203
+-0.113538 0.486938 0.651112 0.57101
+-0.145142 0.47847 0.612372 0.612373
+-0.176125 0.467953 0.57101 0.651112
+-0.206354 0.455432 0.527203 0.687064
+-0.235698 0.440961 0.481138 0.720074
+-0.264034 0.424601 0.433013 0.75
+-0.291239 0.406423 0.383033 0.776715
+-0.317197 0.386505 0.331414 0.800103
+-0.341796 0.364932 0.278375 0.820066
+-0.364932 0.341796 0.224144 0.836516
+-0.386505 0.317197 0.168953 0.849385
+-0.406423 0.291239 0.113039 0.858616
+-0.424601 0.264034 0.0566408 0.864171
+-0.440961 0.235698 9.74292e-08 0.866025
+-0.455432 0.206354 -0.0566408 0.864171
+-0.467953 0.176125 -0.113039 0.858616
+-0.47847 0.145142 -0.168953 0.849385
+-0.486938 0.113538 -0.224144 0.836516
+-0.493322 0.0814478 -0.278375 0.820066
+-0.497592 0.0490085 -0.331414 0.800103
+-0.499732 0.0163596 -0.383033 0.776715
+0.499732 0.0163595 0.278375 -0.820066
+0.497592 0.0490086 0.331414 -0.800103
+0.493322 0.0814477 0.383033 -0.776715
+0.486938 0.113538 0.433013 -0.75
+0.47847 0.145142 0.481138 -0.720074
+0.467953 0.176125 0.527203 -0.687064
+0.455432 0.206354 0.57101 -0.651112
+0.440961 0.235698 0.612372 -0.612372
+0.424601 0.264034 0.651112 -0.57101
+0.406423 0.291239 0.687064 -0.527203
+0.386505 0.317197 0.720074 -0.481138
+0.364932 0.341796 0.75 -0.433013
+0.341796 0.364932 0.776715 -0.383033
+0.317197 0.386505 0.800103 -0.331414
+0.291239 0.406423 0.820066 -0.278375
+0.264034 0.424601 0.836516 -0.224144
+0.235698 0.440961 0.849385 -0.168953
+0.206353 0.455432 0.858616 -0.113039
+0.176125 0.467953 0.864171 -0.0566407
+0.145142 0.47847 0.866025 3.72097e-08
+0.113538 0.486938 0.864171 0.0566408
+0.0814477 0.493322 0.858616 0.113039
+0.0490085 0.497592 0.849385 0.168953
+0.0163595 0.499732 0.836516 0.224144
+-0.0163596 0.499732 0.820066 0.278375
+-0.0490086 0.497592 0.800103 0.331414
+-0.0814478 0.493322 0.776715 0.383033
+-0.113538 0.486938 0.75 0.433013
+-0.145142 0.47847 0.720074 0.481138
+-0.176125 0.467953 0.687064 0.527203
+-0.206354 0.455432 0.651112 0.57101
+-0.235698 0.440961 0.612373 0.612372
+-0.264034 0.424601 0.57101 0.651112
+-0.291239 0.406423 0.527203 0.687064
+-0.317197 0.386505 0.481138 0.720074
+-0.341796 0.364932 0.433013 0.75
+-0.364932 0.341796 0.383033 0.776715
+-0.386505 0.317197 0.331414 0.800103
+-0.406423 0.291239 0.278375 0.820066
+-0.424601 0.264034 0.224144 0.836516
+-0.440961 0.235698 0.168953 0.849385
+-0.455432 0.206354 0.113039 0.858616
+-0.467953 0.176125 0.0566407 0.864171
+-0.47847 0.145142 2.81734e-08 0.866025
+-0.486938 0.113538 -0.0566408 0.864171
+-0.493322 0.0814478 -0.113039 0.858616
+-0.497592 0.0490085 -0.168953 0.849385
+-0.499732 0.0163596 -0.224144 0.836516
+0.539773 0.0176703 0.347345 -0.766606
+0.537461 0.0529353 0.396739 -0.742247
+0.532848 0.0879736 0.444435 -0.71471
+0.525954 0.122635 0.490228 -0.684112
+0.516807 0.156772 0.533922 -0.650585
+0.505447 0.190237 0.575329 -0.614272
+0.491923 0.222887 0.614272 -0.575329
+0.476292 0.254583 0.650585 -0.533921
+0.458622 0.285189 0.684112 -0.490228
+0.438987 0.314574 0.71471 -0.444435
+0.417473 0.342612 0.742247 -0.396739
+0.394172 0.369182 0.766606 -0.347345
+0.369182 0.394172 0.787682 -0.296463
+0.342612 0.417473 0.805385 -0.244311
+0.314574 0.438987 0.81964 -0.191113
+0.285189 0.458622 0.830384 -0.137097
+0.254583 0.476292 0.837573 -0.0824936
+0.222887 0.491923 0.841175 -0.0275372
+0.190237 0.505447 0.841175 0.0275373
+0.156772 0.516807 0.837573 0.0824938
+0.122635 0.525954 0.830384 0.137097
+0.0879735 0.532848 0.81964 0.191113
+0.0529352 0.537461 0.805385 0.244311
+0.0176703 0.539773 0.787682 0.296463
+-0.0176704 0.539773 0.766606 0.347345
+-0.0529354 0.537461 0.742247 0.39674
+-0.0879736 0.532848 0.71471 0.444435
+-0.122635 0.525954 0.684112 0.490228
+-0.156772 0.516807 0.650585 0.533922
+-0.190237 0.505447 0.614272 0.575329
+-0.222887 0.491923 0.575329 0.614272
+-0.254583 0.476292 0.533922 0.650585
+-0.285189 0.458622 0.490228 0.684112
+-0.314574 0.438987 0.444435 0.71471
+-0.342612 0.417473 0.396739 0.742247
+-0.369182 0.394172 0.347345 0.766606
+-0.394172 0.369182 0.296463 0.787682
+-0.417473 0.342612 0.244311 0.805385
+-0.438987 0.314574 0.191113 0.81964
+-0.458622 0.285189 0.137097 0.830384
+-0.476292 0.254583 0.0824938 0.837573
+-0.491923 0.222887 0.0275372 0.841175
+-0.505447 0.190237 -0.0275373 0.841175
+-0.516807 0.156772 -0.0824937 0.837573
+-0.525954 0.122635 -0.137097 0.830384
+-0.532848 0.0879736 -0.191113 0.81964
+-0.537461 0.0529353 -0.244311 0.805385
+-0.539773 0.0176704 -0.296463 0.787682
+0.456191 0.0149342 0.649401 -0.608231
+0.454238 0.0447385 0.687791 -0.564456
+0.450339 0.0743513 0.723236 -0.518263
+0.444512 0.103646 0.755583 -0.469852
+0.436782 0.132496 0.784695 -0.419428
+0.427181 0.160779 0.810447 -0.367209
+0.415751 0.188374 0.832728 -0.313417
+0.40254 0.215162 0.851444 -0.258283
+0.387606 0.241029 0.866513 -0.202043
+0.371012 0.265863 0.877872 -0.144937
+0.352829 0.28956 0.885472 -0.0872114
+0.333136 0.312016 0.88928 -0.029112
+0.312016 0.333136 0.88928 0.029112
+0.28956 0.352829 0.885472 0.0872114
+0.265863 0.371012 0.877872 0.144937
+0.241029 0.387606 0.866513 0.202043
+0.215162 0.40254 0.851444 0.258283
+0.188374 0.415751 0.832728 0.313417
+0.160779 0.427181 0.810447 0.367209
+0.132496 0.436782 0.784695 0.419428
+0.103646 0.444512 0.755583 0.469852
+0.0743512 0.450339 0.723236 0.518263
+0.0447384 0.454238 0.687791 0.564456
+0.0149341 0.456191 0.649401 0.608231
+-0.0149342 0.456191 0.608231 0.649401
+-0.0447385 0.454238 0.564456 0.687791
+-0.0743513 0.450339 0.518263 0.723236
+-0.103646 0.444512 0.469852 0.755583
+-0.132496 0.436781 0.419428 0.784695
+-0.160779 0.427181 0.367209 0.810447
+-0.188374 0.415751 0.313417 0.832728
+-0.215162 0.40254 0.258283 0.851444
+-0.241029 0.387606 0.202043 0.866513
+-0.265864 0.371012 0.144937 0.877872
+-0.28956 0.352829 0.0872114 0.885472
+-0.312016 0.333136 0.029112 0.88928
+-0.333136 0.312016 -0.029112 0.88928
+-0.352829 0.28956 -0.0872114 0.885472
+-0.371012 0.265864 -0.144937 0.877872
+-0.387606 0.241029 -0.202043 0.866513
+-0.40254 0.215162 -0.258283 0.851444
+-0.415751 0.188374 -0.313417 0.832728
+-0.427181 0.160779 -0.367209 0.810447
+-0.436782 0.132496 -0.419428 0.784695
+-0.444512 0.103646 -0.469852 0.755583
+-0.450339 0.0743513 -0.518263 0.723236
+-0.454238 0.0447385 -0.564456 0.687791
+-0.456191 0.0149342 -0.608231 0.649401
+0.499732 0.0163595 0.687064 -0.527203
+0.497592 0.0490086 0.720074 -0.481138
+0.493322 0.0814477 0.75 -0.433013
+0.486938 0.113538 0.776715 -0.383033
+0.47847 0.145142 0.800103 -0.331414
+0.467953 0.176125 0.820066 -0.278375
+0.455432 0.206354 0.836516 -0.224144
+0.440961 0.235698 0.849385 -0.168953
+0.424601 0.264034 0.858616 -0.113039
+0.406423 0.291239 0.864171 -0.0566408
+0.386505 0.317197 0.866025 -1.57003e-08
+0.364932 0.341796 0.864171 0.0566408
+0.341796 0.364932 0.858616 0.113039
+0.317197 0.386505 0.849385 0.168953
+0.291239 0.406423 0.836516 0.224144
+0.264034 0.424601 0.820066 0.278375
+0.235698 0.440961 0.800103 0.331414
+0.206353 0.455432 0.776715 0.383033
+0.176125 0.467953 0.75 0.433013
+0.145142 0.47847 0.720074 0.481138
+0.113538 0.486938 0.687064 0.527203
+0.0814477 0.493322 0.651112 0.57101
+0.0490085 0.497592 0.612372 0.612373
+0.0163595 0.499732 0.57101 0.651112
+-0.0163596 0.499732 0.527203 0.687064
+-0.0490086 0.497592 0.481138 0.720074
+-0.0814478 0.493322 0.433013 0.75
+-0.113538 0.486938 0.383033 0.776715
+-0.145142 0.47847 0.331413 0.800103
+-0.176125 0.467953 0.278375 0.820066
+-0.206354 0.455432 0.224144 0.836516
+-0.235698 0.440961 0.168953 0.849385
+-0.264034 0.424601 0.113039 0.858616
+-0.291239 0.406423 0.0566407 0.864171
+-0.317197 0.386505 2.94643e-08 0.866025
+-0.341796 0.364932 -0.0566408 0.864171
+-0.364932 0.341796 -0.113039 0.858616
+-0.386505 0.317197 -0.168953 0.849385
+-0.406423 0.291239 -0.224144 0.836516
+-0.424601 0.264034 -0.278375 0.820066
+-0.440961 0.235698 -0.331413 0.800103
+-0.455432 0.206354 -0.383033 0.776715
+-0.467953 0.176125 -0.433013 0.75
+-0.47847 0.145142 -0.481138 0.720074
+-0.486938 0.113538 -0.527203 0.687064
+-0.493322 0.0814478 -0.57101 0.651112
+-0.497592 0.0490085 -0.612372 0.612372
+-0.499732 0.0163596 -0.651112 0.57101
+0.499732 0.0163595 0.57101 -0.651112
+0.497592 0.0490086 0.612372 -0.612372
+0.493322 0.0814477 0.651112 -0.57101
+0.486938 0.113538 0.687064 -0.527203
+0.47847 0.145142 0.720074 -0.481138
+0.467953 0.176125 0.75 -0.433013
+0.455432 0.206354 0.776715 -0.383033
+0.440961 0.235698 0.800103 -0.331414
+0.424601 0.264034 0.820066 -0.278375
+0.406423 0.291239 0.836516 -0.224144
+0.386505 0.317197 0.849385 -0.168953
+0.364932 0.341796 0.858616 -0.113039
+0.341796 0.364932 0.864171 -0.0566408
+0.317197 0.386505 0.866025 1.93636e-09
+0.291239 0.406423 0.864171 0.0566408
+0.264034 0.424601 0.858616 0.113039
+0.235698 0.440961 0.849385 0.168953
+0.206353 0.455432 0.836516 0.224144
+0.176125 0.467953 0.820066 0.278375
+0.145142 0.47847 0.800103 0.331414
+0.113538 0.486938 0.776715 0.383033
+0.0814477 0.493322 0.75 0.433013
+0.0490085 0.497592 0.720074 0.481138
+0.0163595 0.499732 0.687064 0.527203
+-0.0163596 0.499732 0.651112 0.57101
+-0.0490086 0.497592 0.612372 0.612372
+-0.0814478 0.493322 0.57101 0.651112
+-0.113538 0.486938 0.527203 0.687064
+-0.145142 0.47847 0.481138 0.720074
+-0.176125 0.467953 0.433013 0.75
+-0.206354 0.455432 0.383033 0.776715
+-0.235698 0.440961 0.331414 0.800103
+-0.264034 0.424601 0.278375 0.820066
+-0.291239 0.406423 0.224144 0.836516
+-0.317197 0.386505 0.168953 0.849385
+-0.341796 0.364932 0.113039 0.858616
+-0.364932 0.341796 0.0566408 0.864171
+-0.386505 0.317197 -3.97915e-08 0.866025
+-0.406423 0.291239 -0.0566407 0.864171
+-0.424601 0.264034 -0.113039 0.858616
+-0.440961 0.235698 -0.168953 0.849385
+-0.455432 0.206354 -0.224144 0.836516
+-0.467953 0.176125 -0.278375 0.820066
+-0.47847 0.145142 -0.331414 0.800103
+-0.486938 0.113538 -0.383033 0.776715
+-0.493322 0.0814478 -0.433013 0.75
+-0.497592 0.0490085 -0.481138 0.720074
+-0.499732 0.0163596 -0.527203 0.687064
+0.539773 0.0176703 0.614272 -0.575329
+0.537461 0.0529353 0.650585 -0.533922
+0.532848 0.0879736 0.684112 -0.490228
+0.525954 0.122635 0.71471 -0.444435
+0.516807 0.156772 0.742247 -0.396739
+0.505447 0.190237 0.766606 -0.347345
+0.491923 0.222887 0.787682 -0.296463
+0.476292 0.254583 0.805385 -0.244311
+0.458622 0.285189 0.81964 -0.191113
+0.438987 0.314574 0.830384 -0.137097
+0.417473 0.342612 0.837573 -0.0824937
+0.394172 0.369182 0.841175 -0.0275372
+0.369182 0.394172 0.841175 0.0275372
+0.342612 0.417473 0.837573 0.0824937
+0.314574 0.438987 0.830384 0.137097
+0.285189 0.458622 0.81964 0.191113
+0.254583 0.476292 0.805385 0.244311
+0.222887 0.491923 0.787682 0.296463
+0.190237 0.505447 0.766606 0.347345
+0.156772 0.516807 0.742247 0.39674
+0.122635 0.525954 0.71471 0.444435
+0.0879735 0.532848 0.684112 0.490228
+0.0529352 0.537461 0.650585 0.533922
+0.0176703 0.539773 0.614272 0.575329
+-0.0176704 0.539773 0.575329 0.614272
+-0.0529354 0.537461 0.533921 0.650585
+-0.0879736 0.532848 0.490228 0.684112
+-0.122635 0.525954 0.444435 0.71471
+-0.156772 0.516807 0.396739 0.742247
+-0.190237 0.505447 0.347345 0.766606
+-0.222887 0.491923 0.296462 0.787682
+-0.254583 0.476292 0.244311 0.805385
+-0.285189 0.458622 0.191113 0.81964
+-0.314574 0.438987 0.137097 0.830384
+-0.342612 0.417473 0.0824937 0.837573
+-0.369182 0.394172 0.0275372 0.841175
+-0.394172 0.369182 -0.0275372 0.841175
+-0.417473 0.342612 -0.0824938 0.837573
+-0.438987 0.314574 -0.137097 0.830384
+-0.458622 0.285189 -0.191113 0.81964
+-0.476292 0.254583 -0.244311 0.805385
+-0.491923 0.222887 -0.296463 0.787682
+-0.505447 0.190237 -0.347345 0.766606
+-0.516807 0.156772 -0.396739 0.742247
+-0.525954 0.122635 -0.444435 0.71471
+-0.532848 0.0879736 -0.490228 0.684112
+-0.537461 0.0529353 -0.533922 0.650585
+-0.539773 0.0176704 -0.575329 0.614272
+0.539773 0.0176703 0.71471 -0.444435
+0.537461 0.0529353 0.742247 -0.396739
+0.532848 0.0879736 0.766606 -0.347345
+0.525954 0.122635 0.787682 -0.296463
+0.516807 0.156772 0.805385 -0.244311
+0.505447 0.190237 0.81964 -0.191113
+0.491923 0.222887 0.830384 -0.137097
+0.476292 0.254583 0.837573 -0.0824937
+0.458622 0.285189 0.841175 -0.0275372
+0.438987 0.314574 0.841175 0.0275372
+0.417473 0.342612 0.837573 0.0824937
+0.394172 0.369182 0.830384 0.137097
+0.369182 0.394172 0.81964 0.191113
+0.342612 0.417473 0.805385 0.244311
+0.314574 0.438987 0.787682 0.296463
+0.285189 0.458622 0.766606 0.347345
+0.254583 0.476292 0.742247 0.39674
+0.222887 0.491923 0.71471 0.444435
+0.190237 0.505447 0.684112 0.490228
+0.156772 0.516807 0.650585 0.533922
+0.122635 0.525954 0.614272 0.575329
+0.0879735 0.532848 0.575329 0.614272
+0.0529352 0.537461 0.533921 0.650585
+0.0176703 0.539773 0.490228 0.684112
+-0.0176704 0.539773 0.444435 0.71471
+-0.0529354 0.537461 0.396739 0.742247
+-0.0879736 0.532848 0.347345 0.766606
+-0.122635 0.525954 0.296463 0.787682
+-0.156772 0.516807 0.244311 0.805385
+-0.190237 0.505447 0.191113 0.81964
+-0.222887 0.491923 0.137097 0.830384
+-0.254583 0.476292 0.0824938 0.837573
+-0.285189 0.458622 0.0275372 0.841175
+-0.314574 0.438987 -0.0275373 0.841175
+-0.342612 0.417473 -0.0824937 0.837573
+-0.369182 0.394172 -0.137097 0.830384
+-0.394172 0.369182 -0.191113 0.81964
+-0.417473 0.342612 -0.244311 0.805385
+-0.438987 0.314574 -0.296463 0.787682
+-0.458622 0.285189 -0.347345 0.766606
+-0.476292 0.254583 -0.396739 0.742247
+-0.491923 0.222887 -0.444435 0.71471
+-0.505447 0.190237 -0.490228 0.684112
+-0.516807 0.156772 -0.533921 0.650585
+-0.525954 0.122635 -0.575329 0.614272
+-0.532848 0.0879736 -0.614272 0.575329
+-0.537461 0.0529353 -0.650585 0.533921
+-0.539773 0.0176704 -0.684112 0.490228
+0.577041 0.0188904 0.732294 -0.361127
+0.57457 0.0565902 0.754344 -0.31246
+0.569639 0.0940477 0.773165 -0.262454
+0.562268 0.131103 0.788675 -0.211325
+0.55249 0.167596 0.800808 -0.159291
+0.540346 0.203372 0.809511 -0.106574
+0.525887 0.238277 0.814748 -0.0534014
+0.509177 0.272161 0.816497 1.72366e-08
+0.490287 0.30488 0.814748 0.0534014
+0.469297 0.336294 0.809511 0.106574
+0.446298 0.366267 0.800808 0.159291
+0.421387 0.394672 0.788675 0.211325
+0.394672 0.421387 0.773165 0.262454
+0.366267 0.446298 0.754344 0.31246
+0.336294 0.469297 0.732294 0.361127
+0.30488 0.490287 0.707107 0.408248
+0.272161 0.509178 0.678892 0.453621
+0.238276 0.525887 0.64777 0.497052
+0.203372 0.540346 0.613875 0.538354
+0.167596 0.55249 0.57735 0.57735
+0.131103 0.562268 0.538354 0.613875
+0.0940477 0.569639 0.497052 0.64777
+0.0565902 0.57457 0.453621 0.678892
+0.0188903 0.577041 0.408248 0.707107
+-0.0188904 0.577041 0.361127 0.732294
+-0.0565903 0.57457 0.31246 0.754345
+-0.0940478 0.569639 0.262454 0.773165
+-0.131103 0.562268 0.211325 0.788675
+-0.167596 0.55249 0.15929 0.800808
+-0.203372 0.540346 0.106574 0.809511
+-0.238277 0.525887 0.0534013 0.814748
+-0.272161 0.509178 9.30742e-08 0.816497
+-0.30488 0.490287 -0.0534014 0.814748
+-0.336294 0.469297 -0.106574 0.809511
+-0.366267 0.446298 -0.159291 0.800808
+-0.394672 0.421387 -0.211325 0.788675
+-0.421387 0.394672 -0.262454 0.773165
+-0.446298 0.366267 -0.31246 0.754344
+-0.469297 0.336294 -0.361127 0.732294
+-0.490287 0.30488 -0.408248 0.707107
+-0.509177 0.272161 -0.453621 0.678892
+-0.525887 0.238277 -0.497052 0.64777
+-0.540346 0.203372 -0.538354 0.613875
+-0.55249 0.167596 -0.57735 0.57735
+-0.562268 0.131103 -0.613875 0.538354
+-0.569639 0.0940478 -0.64777 0.497052
+-0.57457 0.0565902 -0.678892 0.453621
+-0.577041 0.0188904 -0.707107 0.408248
+0.577041 0.0188904 0.64777 -0.497052
+0.57457 0.0565902 0.678892 -0.453621
+0.569639 0.0940477 0.707107 -0.408248
+0.562268 0.131103 0.732294 -0.361127
+0.55249 0.167596 0.754344 -0.31246
+0.540346 0.203372 0.773165 -0.262454
+0.525887 0.238277 0.788675 -0.211325
+0.509177 0.272161 0.800808 -0.159291
+0.490287 0.30488 0.809511 -0.106574
+0.469297 0.336294 0.814748 -0.0534014
+0.446298 0.366267 0.816497 -1.48024e-08
+0.421387 0.394672 0.814748 0.0534015
+0.394672 0.421387 0.809511 0.106574
+0.366267 0.446298 0.800808 0.159291
+0.336294 0.469297 0.788675 0.211325
+0.30488 0.490287 0.773165 0.262454
+0.272161 0.509178 0.754344 0.31246
+0.238276 0.525887 0.732294 0.361127
+0.203372 0.540346 0.707107 0.408248
+0.167596 0.55249 0.678892 0.453621
+0.131103 0.562268 0.64777 0.497052
+0.0940477 0.569639 0.613875 0.538354
+0.0565902 0.57457 0.57735 0.57735
+0.0188903 0.577041 0.538354 0.613875
+-0.0188904 0.577041 0.497052 0.64777
+-0.0565903 0.57457 0.453621 0.678892
+-0.0940478 0.569639 0.408248 0.707107
+-0.131103 0.562268 0.361127 0.732294
+-0.167596 0.55249 0.31246 0.754345
+-0.203372 0.540346 0.262454 0.773165
+-0.238277 0.525887 0.211325 0.788675
+-0.272161 0.509178 0.159291 0.800808
+-0.30488 0.490287 0.106574 0.809511
+-0.336294 0.469297 0.0534014 0.814748
+-0.366267 0.446298 2.77792e-08 0.816497
+-0.394672 0.421387 -0.0534015 0.814748
+-0.421387 0.394672 -0.106574 0.809511
+-0.446298 0.366267 -0.159291 0.800808
+-0.469297 0.336294 -0.211325 0.788675
+-0.490287 0.30488 -0.262454 0.773165
+-0.509177 0.272161 -0.31246 0.754345
+-0.525887 0.238277 -0.361127 0.732294
+-0.540346 0.203372 -0.408248 0.707107
+-0.55249 0.167596 -0.453621 0.678892
+-0.562268 0.131103 -0.497052 0.64777
+-0.569639 0.0940478 -0.538354 0.613875
+-0.57457 0.0565902 -0.57735 0.57735
+-0.577041 0.0188904 -0.613875 0.538354
+0.612045 0.0200363 0.671353 -0.417474
+0.609424 0.060023 0.69722 -0.372672
+0.604193 0.0997527 0.720101 -0.326274
+0.596375 0.139055 0.739899 -0.278478
+0.586004 0.177762 0.756528 -0.22949
+0.573123 0.215708 0.769917 -0.17952
+0.557788 0.25273 0.78001 -0.12878
+0.540064 0.28867 0.786763 -0.0774893
+0.520028 0.323374 0.790146 -0.0258667
+0.497765 0.356693 0.790146 0.0258667
+0.47337 0.388485 0.786763 0.0774893
+0.446949 0.418613 0.78001 0.12878
+0.418613 0.446949 0.769917 0.17952
+0.388485 0.47337 0.756528 0.22949
+0.356693 0.497765 0.739899 0.278478
+0.323374 0.520028 0.720101 0.326274
+0.28867 0.540064 0.69722 0.372672
+0.25273 0.557788 0.671353 0.417474
+0.215708 0.573123 0.642612 0.460489
+0.177762 0.586004 0.611118 0.501532
+0.139055 0.596375 0.577008 0.540427
+0.0997526 0.604193 0.540427 0.577008
+0.0600229 0.609424 0.501532 0.611118
+0.0200362 0.612045 0.460489 0.642612
+-0.0200363 0.612045 0.417474 0.671353
+-0.060023 0.609424 0.372672 0.69722
+-0.0997527 0.604193 0.326274 0.720101
+-0.139055 0.596375 0.278478 0.739899
+-0.177762 0.586004 0.22949 0.756528
+-0.215708 0.573123 0.179519 0.769917
+-0.25273 0.557788 0.12878 0.78001
+-0.28867 0.540064 0.0774894 0.786763
+-0.323374 0.520028 0.0258667 0.790146
+-0.356693 0.497765 -0.0258668 0.790146
+-0.388485 0.47337 -0.0774893 0.786763
+-0.418613 0.446949 -0.12878 0.78001
+-0.446949 0.418613 -0.17952 0.769917
+-0.47337 0.388485 -0.22949 0.756528
+-0.497765 0.356693 -0.278478 0.739899
+-0.520028 0.323374 -0.326274 0.720101
+-0.540064 0.28867 -0.372672 0.69722
+-0.557788 0.25273 -0.417474 0.671353
+-0.573123 0.215708 -0.460489 0.642612
+-0.586004 0.177762 -0.501532 0.611118
+-0.596375 0.139055 -0.540427 0.577008
+-0.604193 0.0997527 -0.577008 0.540427
+-0.609424 0.060023 -0.611118 0.501532
+-0.612045 0.0200363 -0.642612 0.460489
+0.539773 0.0176703 0.490228 -0.684112
+0.537461 0.0529353 0.533922 -0.650585
+0.532848 0.0879736 0.575329 -0.614272
+0.525954 0.122635 0.614272 -0.575329
+0.516807 0.156772 0.650585 -0.533922
+0.505447 0.190237 0.684112 -0.490228
+0.491923 0.222887 0.71471 -0.444435
+0.476292 0.254583 0.742247 -0.396739
+0.458622 0.285189 0.766606 -0.347345
+0.438987 0.314574 0.787682 -0.296463
+0.417473 0.342612 0.805385 -0.244311
+0.394172 0.369182 0.81964 -0.191113
+0.369182 0.394172 0.830384 -0.137097
+0.342612 0.417473 0.837573 -0.0824937
+0.314574 0.438987 0.841175 -0.0275372
+0.285189 0.458622 0.841175 0.0275373
+0.254583 0.476292 0.837573 0.0824938
+0.222887 0.491923 0.830384 0.137097
+0.190237 0.505447 0.81964 0.191113
+0.156772 0.516807 0.805385 0.244311
+0.122635 0.525954 0.787682 0.296463
+0.0879735 0.532848 0.766606 0.347345
+0.0529352 0.537461 0.742247 0.39674
+0.0176703 0.539773 0.71471 0.444435
+-0.0176704 0.539773 0.684112 0.490228
+-0.0529354 0.537461 0.650585 0.533922
+-0.0879736 0.532848 0.614272 0.575329
+-0.122635 0.525954 0.575329 0.614272
+-0.156772 0.516807 0.533921 0.650585
+-0.190237 0.505447 0.490228 0.684112
+-0.222887 0.491923 0.444435 0.71471
+-0.254583 0.476292 0.39674 0.742247
+-0.285189 0.458622 0.347345 0.766606
+-0.314574 0.438987 0.296463 0.787682
+-0.342612 0.417473 0.244311 0.805385
+-0.369182 0.394172 0.191113 0.81964
+-0.394172 0.369182 0.137097 0.830384
+-0.417473 0.342612 0.0824937 0.837573
+-0.438987 0.314574 0.0275373 0.841175
+-0.458622 0.285189 -0.0275372 0.841175
+-0.476292 0.254583 -0.0824936 0.837573
+-0.491923 0.222887 -0.137097 0.830384
+-0.505447 0.190237 -0.191113 0.81964
+-0.516807 0.156772 -0.244311 0.805385
+-0.525954 0.122635 -0.296463 0.787682
+-0.532848 0.0879736 -0.347345 0.766606
+-0.537461 0.0529353 -0.39674 0.742247
+-0.539773 0.0176704 -0.444435 0.71471
+0.577041 0.0188904 0.538354 -0.613875
+0.57457 0.0565902 0.57735 -0.57735
+0.569639 0.0940477 0.613875 -0.538354
+0.562268 0.131103 0.64777 -0.497052
+0.55249 0.167596 0.678892 -0.453621
+0.540346 0.203372 0.707107 -0.408248
+0.525887 0.238277 0.732294 -0.361127
+0.509177 0.272161 0.754344 -0.31246
+0.490287 0.30488 0.773165 -0.262454
+0.469297 0.336294 0.788675 -0.211325
+0.446298 0.366267 0.800808 -0.159291
+0.421387 0.394672 0.809511 -0.106574
+0.394672 0.421387 0.814748 -0.0534014
+0.366267 0.446298 0.816497 1.82562e-09
+0.336294 0.469297 0.814748 0.0534015
+0.30488 0.490287 0.809511 0.106574
+0.272161 0.509178 0.800808 0.159291
+0.238276 0.525887 0.788675 0.211325
+0.203372 0.540346 0.773165 0.262454
+0.167596 0.55249 0.754344 0.31246
+0.131103 0.562268 0.732294 0.361127
+0.0940477 0.569639 0.707107 0.408248
+0.0565902 0.57457 0.678892 0.453621
+0.0188903 0.577041 0.64777 0.497052
+-0.0188904 0.577041 0.613875 0.538354
+-0.0565903 0.57457 0.57735 0.57735
+-0.0940478 0.569639 0.538354 0.613875
+-0.131103 0.562268 0.497052 0.64777
+-0.167596 0.55249 0.453621 0.678892
+-0.203372 0.540346 0.408248 0.707107
+-0.238277 0.525887 0.361127 0.732294
+-0.272161 0.509178 0.31246 0.754344
+-0.30488 0.490287 0.262454 0.773165
+-0.336294 0.469297 0.211325 0.788675
+-0.366267 0.446298 0.159291 0.800808
+-0.394672 0.421387 0.106574 0.809511
+-0.421387 0.394672 0.0534015 0.814748
+-0.446298 0.366267 -3.75158e-08 0.816497
+-0.469297 0.336294 -0.0534014 0.814748
+-0.490287 0.30488 -0.106574 0.809511
+-0.509177 0.272161 -0.15929 0.800808
+-0.525887 0.238277 -0.211325 0.788675
+-0.540346 0.203372 -0.262454 0.773165
+-0.55249 0.167596 -0.31246 0.754344
+-0.562268 0.131103 -0.361127 0.732294
+-0.569639 0.0940478 -0.408248 0.707107
+-0.57457 0.0565902 -0.453621 0.678892
+-0.577041 0.0188904 -0.497052 0.64777
+0.577041 0.0188904 0.408248 -0.707107
+0.57457 0.0565902 0.453621 -0.678892
+0.569639 0.0940477 0.497052 -0.64777
+0.562268 0.131103 0.538354 -0.613875
+0.55249 0.167596 0.57735 -0.57735
+0.540346 0.203372 0.613875 -0.538354
+0.525887 0.238277 0.64777 -0.497052
+0.509177 0.272161 0.678892 -0.453621
+0.490287 0.30488 0.707107 -0.408248
+0.469297 0.336294 0.732294 -0.361127
+0.446298 0.366267 0.754344 -0.31246
+0.421387 0.394672 0.773165 -0.262454
+0.394672 0.421387 0.788675 -0.211325
+0.366267 0.446298 0.800808 -0.159291
+0.336294 0.469297 0.809511 -0.106574
+0.30488 0.490287 0.814748 -0.0534014
+0.272161 0.509178 0.816497 6.71206e-08
+0.238276 0.525887 0.814748 0.0534015
+0.203372 0.540346 0.809511 0.106574
+0.167596 0.55249 0.800808 0.159291
+0.131103 0.562268 0.788675 0.211325
+0.0940477 0.569639 0.773165 0.262454
+0.0565902 0.57457 0.754344 0.31246
+0.0188903 0.577041 0.732293 0.361127
+-0.0188904 0.577041 0.707107 0.408248
+-0.0565903 0.57457 0.678892 0.453621
+-0.0940478 0.569639 0.64777 0.497052
+-0.131103 0.562268 0.613875 0.538354
+-0.167596 0.55249 0.57735 0.57735
+-0.203372 0.540346 0.538354 0.613875
+-0.238277 0.525887 0.497052 0.64777
+-0.272161 0.509178 0.453621 0.678892
+-0.30488 0.490287 0.408248 0.707107
+-0.336294 0.469297 0.361127 0.732294
+-0.366267 0.446298 0.31246 0.754344
+-0.394672 0.421387 0.262454 0.773165
+-0.421387 0.394672 0.211325 0.788675
+-0.446298 0.366267 0.159291 0.800808
+-0.469297 0.336294 0.106574 0.809511
+-0.490287 0.30488 0.0534014 0.814748
+-0.509177 0.272161 9.18571e-08 0.816497
+-0.525887 0.238277 -0.0534014 0.814748
+-0.540346 0.203372 -0.106574 0.809511
+-0.55249 0.167596 -0.159291 0.800808
+-0.562268 0.131103 -0.211325 0.788675
+-0.569639 0.0940478 -0.262454 0.773165
+-0.57457 0.0565902 -0.31246 0.754344
+-0.577041 0.0188904 -0.361127 0.732294
+0.612045 0.0200363 0.460489 -0.642612
+0.609424 0.060023 0.501532 -0.611118
+0.604193 0.0997527 0.540427 -0.577008
+0.596375 0.139055 0.577008 -0.540427
+0.586004 0.177762 0.611118 -0.501532
+0.573123 0.215708 0.642612 -0.460489
+0.557788 0.25273 0.671353 -0.417474
+0.540064 0.28867 0.69722 -0.372672
+0.520028 0.323374 0.720101 -0.326274
+0.497765 0.356693 0.739899 -0.278478
+0.47337 0.388485 0.756528 -0.22949
+0.446949 0.418613 0.769917 -0.17952
+0.418613 0.446949 0.78001 -0.12878
+0.388485 0.47337 0.786763 -0.0774894
+0.356693 0.497765 0.790146 -0.0258667
+0.323374 0.520028 0.790146 0.0258668
+0.28867 0.540064 0.786763 0.0774894
+0.25273 0.557788 0.78001 0.12878
+0.215708 0.573123 0.769917 0.17952
+0.177762 0.586004 0.756528 0.22949
+0.139055 0.596375 0.739899 0.278478
+0.0997526 0.604193 0.720101 0.326274
+0.0600229 0.609424 0.69722 0.372672
+0.0200362 0.612045 0.671353 0.417474
+-0.0200363 0.612045 0.642612 0.460489
+-0.060023 0.609424 0.611118 0.501532
+-0.0997527 0.604193 0.577008 0.540427
+-0.139055 0.596375 0.540427 0.577008
+-0.177762 0.586004 0.501532 0.611119
+-0.215708 0.573123 0.460489 0.642612
+-0.25273 0.557788 0.417474 0.671353
+-0.28867 0.540064 0.372672 0.69722
+-0.323374 0.520028 0.326274 0.720101
+-0.356693 0.497765 0.278478 0.739899
+-0.388485 0.47337 0.22949 0.756528
+-0.418613 0.446949 0.179519 0.769917
+-0.446949 0.418613 0.12878 0.78001
+-0.47337 0.388485 0.0774893 0.786763
+-0.497765 0.356693 0.0258668 0.790146
+-0.520028 0.323374 -0.0258667 0.790146
+-0.540064 0.28867 -0.0774893 0.786763
+-0.557788 0.25273 -0.12878 0.78001
+-0.573123 0.215708 -0.17952 0.769917
+-0.586004 0.177762 -0.22949 0.756528
+-0.596375 0.139055 -0.278478 0.739899
+-0.604193 0.0997527 -0.326274 0.720101
+-0.609424 0.060023 -0.372672 0.69722
+-0.612045 0.0200363 -0.417474 0.671353
+0.612045 0.0200363 0.577008 -0.540427
+0.609424 0.060023 0.611118 -0.501532
+0.604193 0.0997527 0.642612 -0.460489
+0.596375 0.139055 0.671353 -0.417474
+0.586004 0.177762 0.69722 -0.372672
+0.573123 0.215708 0.720101 -0.326274
+0.557788 0.25273 0.739899 -0.278478
+0.540064 0.28867 0.756528 -0.22949
+0.520028 0.323374 0.769917 -0.17952
+0.497765 0.356693 0.78001 -0.12878
+0.47337 0.388485 0.786763 -0.0774894
+0.446949 0.418613 0.790146 -0.0258667
+0.418613 0.446949 0.790146 0.0258667
+0.388485 0.47337 0.786763 0.0774894
+0.356693 0.497765 0.78001 0.12878
+0.323374 0.520028 0.769917 0.17952
+0.28867 0.540064 0.756528 0.22949
+0.25273 0.557788 0.739899 0.278478
+0.215708 0.573123 0.720101 0.326274
+0.177762 0.586004 0.69722 0.372672
+0.139055 0.596375 0.671353 0.417474
+0.0997526 0.604193 0.642612 0.460489
+0.0600229 0.609424 0.611118 0.501532
+0.0200362 0.612045 0.577008 0.540427
+-0.0200363 0.612045 0.540427 0.577008
+-0.060023 0.609424 0.501532 0.611118
+-0.0997527 0.604193 0.460489 0.642612
+-0.139055 0.596375 0.417474 0.671353
+-0.177762 0.586004 0.372672 0.69722
+-0.215708 0.573123 0.326273 0.720101
+-0.25273 0.557788 0.278478 0.739899
+-0.28867 0.540064 0.22949 0.756528
+-0.323374 0.520028 0.17952 0.769917
+-0.356693 0.497765 0.12878 0.78001
+-0.388485 0.47337 0.0774894 0.786763
+-0.418613 0.446949 0.0258666 0.790146
+-0.446949 0.418613 -0.0258667 0.790146
+-0.47337 0.388485 -0.0774894 0.786763
+-0.497765 0.356693 -0.12878 0.78001
+-0.520028 0.323374 -0.17952 0.769917
+-0.540064 0.28867 -0.22949 0.756528
+-0.557788 0.25273 -0.278478 0.739899
+-0.573123 0.215708 -0.326274 0.720101
+-0.586004 0.177762 -0.372672 0.69722
+-0.596375 0.139055 -0.417474 0.671353
+-0.604193 0.0997527 -0.460489 0.642612
+-0.609424 0.060023 -0.501532 0.611118
+-0.612045 0.0200363 -0.540427 0.577008
+0.645152 0.0211201 0.605934 -0.464949
+0.642389 0.0632698 0.635045 -0.424324
+0.636876 0.105149 0.661438 -0.381881
+0.628635 0.146577 0.684998 -0.337804
+0.617702 0.187378 0.705625 -0.292279
+0.604125 0.227376 0.72323 -0.245503
+0.58796 0.266401 0.737738 -0.197676
+0.569278 0.304285 0.749087 -0.149003
+0.548158 0.340866 0.757229 -0.099691
+0.52469 0.375988 0.762127 -0.0499525
+0.498976 0.409499 0.763763 -1.38464e-08
+0.471125 0.441257 0.762127 0.0499525
+0.441257 0.471125 0.757229 0.099691
+0.409499 0.498976 0.749087 0.149003
+0.375988 0.52469 0.737738 0.197676
+0.340866 0.548158 0.72323 0.245504
+0.304285 0.569278 0.705625 0.292279
+0.266401 0.58796 0.684998 0.337804
+0.227376 0.604125 0.661438 0.381881
+0.187378 0.617702 0.635045 0.424324
+0.146577 0.628635 0.605934 0.464949
+0.105148 0.636876 0.574227 0.503584
+0.0632697 0.642389 0.540062 0.540062
+0.02112 0.645152 0.503584 0.574227
+-0.0211201 0.645152 0.464949 0.605934
+-0.0632698 0.642389 0.424324 0.635045
+-0.105149 0.636876 0.381881 0.661438
+-0.146577 0.628635 0.337804 0.684998
+-0.187378 0.617702 0.292279 0.705625
+-0.227377 0.604125 0.245503 0.72323
+-0.266401 0.58796 0.197676 0.737738
+-0.304285 0.569278 0.149003 0.749087
+-0.340866 0.548158 0.099691 0.757229
+-0.375988 0.52469 0.0499524 0.762127
+-0.409499 0.498976 2.59851e-08 0.763763
+-0.441257 0.471125 -0.0499525 0.762127
+-0.471125 0.441257 -0.099691 0.757229
+-0.498976 0.409499 -0.149003 0.749087
+-0.52469 0.375988 -0.197676 0.737738
+-0.548158 0.340866 -0.245503 0.72323
+-0.569278 0.304285 -0.292279 0.705625
+-0.58796 0.266401 -0.337804 0.684998
+-0.604125 0.227376 -0.381881 0.661438
+-0.617702 0.187378 -0.424324 0.635045
+-0.628635 0.146577 -0.464949 0.605934
+-0.636876 0.105149 -0.503584 0.574227
+-0.642389 0.0632698 -0.540062 0.540062
+-0.645152 0.0211201 -0.574227 0.503584
+0.645152 0.0211201 0.503584 -0.574227
+0.642389 0.0632698 0.540062 -0.540062
+0.636876 0.105149 0.574227 -0.503584
+0.628635 0.146577 0.605934 -0.464949
+0.617702 0.187378 0.635045 -0.424324
+0.604125 0.227376 0.661438 -0.381881
+0.58796 0.266401 0.684998 -0.337804
+0.569278 0.304285 0.705625 -0.292279
+0.548158 0.340866 0.72323 -0.245503
+0.52469 0.375988 0.737738 -0.197676
+0.498976 0.409499 0.749087 -0.149003
+0.471125 0.441257 0.757229 -0.099691
+0.441257 0.471125 0.762127 -0.0499525
+0.409499 0.498976 0.763763 1.70771e-09
+0.375988 0.52469 0.762127 0.0499525
+0.340866 0.548158 0.757229 0.0996911
+0.304285 0.569278 0.749087 0.149003
+0.266401 0.58796 0.737738 0.197676
+0.227376 0.604125 0.72323 0.245503
+0.187378 0.617702 0.705625 0.292279
+0.146577 0.628635 0.684998 0.337804
+0.105148 0.636876 0.661438 0.381881
+0.0632697 0.642389 0.635045 0.424324
+0.02112 0.645152 0.605934 0.464949
+-0.0211201 0.645152 0.574227 0.503584
+-0.0632698 0.642389 0.540062 0.540062
+-0.105149 0.636876 0.503584 0.574227
+-0.146577 0.628635 0.464949 0.605934
+-0.187378 0.617702 0.424324 0.635045
+-0.227377 0.604125 0.381881 0.661438
+-0.266401 0.58796 0.337803 0.684998
+-0.304285 0.569278 0.292279 0.705625
+-0.340866 0.548158 0.245503 0.72323
+-0.375988 0.52469 0.197676 0.737738
+-0.409499 0.498976 0.149003 0.749087
+-0.441257 0.471125 0.099691 0.757229
+-0.471125 0.441257 0.0499525 0.762127
+-0.498976 0.409499 -3.50928e-08 0.763763
+-0.52469 0.375988 -0.0499524 0.762127
+-0.548158 0.340866 -0.099691 0.757229
+-0.569278 0.304285 -0.149003 0.749087
+-0.58796 0.266401 -0.197676 0.737738
+-0.604125 0.227376 -0.245504 0.72323
+-0.617702 0.187378 -0.292279 0.705625
+-0.628635 0.146577 -0.337804 0.684998
+-0.636876 0.105149 -0.381881 0.661438
+-0.642389 0.0632698 -0.424324 0.635045
+-0.645152 0.0211201 -0.464949 0.605934
+0.676641 0.0221509 0.537165 -0.50311
+0.673743 0.0663579 0.56892 -0.466901
+0.667961 0.110281 0.598239 -0.428692
+0.659318 0.153731 0.624996 -0.388647
+0.647852 0.196524 0.649076 -0.346939
+0.633611 0.238474 0.670378 -0.303744
+0.616658 0.279404 0.688808 -0.259249
+0.597064 0.319137 0.704289 -0.213644
+0.574913 0.357504 0.716754 -0.167124
+0.5503 0.394339 0.72615 -0.119888
+0.523331 0.429486 0.732436 -0.0721387
+0.49412 0.462794 0.735586 -0.0240806
+0.462794 0.49412 0.735586 0.0240806
+0.429486 0.523331 0.732436 0.0721387
+0.394339 0.5503 0.72615 0.119888
+0.357504 0.574913 0.716754 0.167124
+0.319137 0.597064 0.704289 0.213644
+0.279404 0.616658 0.688808 0.259249
+0.238474 0.633611 0.670378 0.303744
+0.196524 0.647852 0.649076 0.346939
+0.153731 0.659318 0.624996 0.388647
+0.110281 0.667961 0.598239 0.428692
+0.0663578 0.673743 0.56892 0.466901
+0.0221508 0.676641 0.537165 0.50311
+-0.022151 0.676641 0.50311 0.537165
+-0.066358 0.673743 0.466901 0.56892
+-0.110281 0.667961 0.428692 0.598239
+-0.153731 0.659318 0.388647 0.624996
+-0.196524 0.647852 0.346938 0.649077
+-0.238475 0.633611 0.303744 0.670378
+-0.279404 0.616658 0.259249 0.688808
+-0.319137 0.597064 0.213644 0.704289
+-0.357504 0.574913 0.167124 0.716754
+-0.394339 0.5503 0.119888 0.72615
+-0.429486 0.523331 0.0721387 0.732436
+-0.462794 0.49412 0.0240805 0.735586
+-0.49412 0.462794 -0.0240805 0.735586
+-0.523331 0.429486 -0.0721387 0.732436
+-0.5503 0.394339 -0.119888 0.72615
+-0.574913 0.357504 -0.167124 0.716754
+-0.597063 0.319137 -0.213644 0.704289
+-0.616658 0.279404 -0.259249 0.688808
+-0.633611 0.238474 -0.303744 0.670378
+-0.647852 0.196524 -0.346939 0.649076
+-0.659318 0.153731 -0.388647 0.624996
+-0.667961 0.110281 -0.428692 0.598239
+-0.673743 0.0663579 -0.466901 0.56892
+-0.676641 0.022151 -0.50311 0.537165
diff --git a/dimos/perception/detection2d/test_yolo_2d_det.py b/dimos/perception/detection2d/test_yolo_2d_det.py
new file mode 100644
index 0000000000..64a49b39d2
--- /dev/null
+++ b/dimos/perception/detection2d/test_yolo_2d_det.py
@@ -0,0 +1,180 @@
+# Copyright 2025 Dimensional Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import time
+import pytest
+import cv2
+import numpy as np
+import reactivex as rx
+from reactivex import operators as ops
+from dimos.perception.detection2d.yolo_2d_det import Yolo2DDetector
+from dimos.stream.video_provider import VideoProvider
+
+
+class TestYolo2DDetector:
+ @pytest.fixture(scope="class")
+ def video_path(self):
+ # Use a video file from assets directory
+ base_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../assets"))
+ video_file = "trimmed_video_office.mov"
+ return os.path.join(base_dir, video_file)
+
+ def test_yolo_detector_initialization(self):
+ """Test YOLO detector initializes correctly with default model path."""
+ try:
+ # Try to initialize with the correct path to the model in the root directory
+ model_path = os.path.join(os.getcwd(), "yolo11n.pt")
+ detector = Yolo2DDetector(model_path=model_path, device="cuda")
+ assert detector is not None
+ assert detector.model is not None
+ except Exception as e:
+ # If the model file doesn't exist, the test should still pass with a warning
+ pytest.skip(f"Skipping test due to model initialization error: {e}")
+
+ def test_yolo_detector_process_image(self, video_path):
+ """Test YOLO detector can process video frames and return detection results."""
+ try:
+ # Initialize detector with model from root directory
+ model_path = os.path.join(os.getcwd(), "yolo11n.pt")
+ detector = Yolo2DDetector(model_path=model_path, device="cuda")
+
+ # Create video provider and directly get a video stream observable
+ assert os.path.exists(video_path), f"Test video not found: {video_path}"
+ video_provider = VideoProvider(dev_name="test_video", video_source=video_path)
+ # Process more frames for thorough testing
+ video_stream = video_provider.capture_video_as_observable(realtime=False, fps=15)
+
+ # Use ReactiveX operators to process the stream
+ def process_frame(frame):
+ try:
+ # Process frame with YOLO
+ bboxes, track_ids, class_ids, confidences, names = detector.process_image(frame)
+ print(
+ f"YOLO results - boxes: {(bboxes)}, tracks: {len(track_ids)}, classes: {(class_ids)}, confidences: {(confidences)}, names: {(names)}"
+ )
+
+ return {
+ "frame": frame,
+ "bboxes": bboxes,
+ "track_ids": track_ids,
+ "class_ids": class_ids,
+ "confidences": confidences,
+ "names": names,
+ }
+ except Exception as e:
+ return {}
+
+ # Create the detection stream using pipe and map operator
+ detection_stream = video_stream.pipe(ops.map(process_frame))
+
+ # Collect results from the stream
+ results = []
+
+ frames_processed = 0
+ target_frames = 10
+
+ def on_next(result):
+ nonlocal frames_processed
+ if not result:
+ return
+
+ results.append(result)
+ frames_processed += 1
+
+ # Stop after processing target number of frames
+ if frames_processed >= target_frames:
+ subscription.dispose()
+
+ def on_error(error):
+ pytest.fail(f"Error in detection stream: {error}")
+
+ def on_completed():
+ pass
+
+ # Subscribe and wait for results
+ subscription = detection_stream.subscribe(
+ on_next=on_next, on_error=on_error, on_completed=on_completed
+ )
+
+ timeout = 10.0
+ start_time = time.time()
+ while frames_processed < target_frames and time.time() - start_time < timeout:
+ time.sleep(0.5)
+
+ # Clean up subscription
+ subscription.dispose()
+ video_provider.dispose_all()
+ # Check that we got detection results
+ if len(results) == 0:
+ pytest.skip("Skipping test due to error: Failed to get any detection results")
+
+ # Verify we have detection results with expected properties
+ assert len(results) > 0, "No detection results were received"
+
+ # Print statistics about detections
+ total_detections = sum(len(r["bboxes"]) for r in results if r.get("bboxes"))
+ avg_detections = total_detections / len(results) if results else 0
+ print(f"Total detections: {total_detections}, Average per frame: {avg_detections:.2f}")
+
+ # Print most common detected objects
+ object_counts = {}
+ for r in results:
+ if r.get("names"):
+ for name in r["names"]:
+ if name:
+ object_counts[name] = object_counts.get(name, 0) + 1
+
+ if object_counts:
+ print("Detected objects:")
+ for obj, count in sorted(object_counts.items(), key=lambda x: x[1], reverse=True)[
+ :5
+ ]:
+ print(f" - {obj}: {count} times")
+
+ # Analyze the first result
+ result = results[0]
+
+ # Check that we have a frame
+ assert "frame" in result, "Result doesn't contain a frame"
+ assert isinstance(result["frame"], np.ndarray), "Frame is not a numpy array"
+
+ # Check that detection results are valid
+ assert isinstance(result["bboxes"], list)
+ assert isinstance(result["track_ids"], list)
+ assert isinstance(result["class_ids"], list)
+ assert isinstance(result["confidences"], list)
+ assert isinstance(result["names"], list)
+
+ # All result lists should be the same length
+ assert (
+ len(result["bboxes"])
+ == len(result["track_ids"])
+ == len(result["class_ids"])
+ == len(result["confidences"])
+ == len(result["names"])
+ )
+
+ # If we have detections, check that bbox format is valid
+ if result["bboxes"]:
+ assert len(result["bboxes"][0]) == 4, (
+ "Bounding boxes should be in [x1, y1, x2, y2] format"
+ )
+
+ except Exception as e:
+ pytest.skip(f"Skipping test due to error: {e}")
+
+
+if __name__ == "__main__":
+ pytest.main(["-v", __file__])
diff --git a/dimos/perception/segmentation/test_sam_2d_seg.py b/dimos/perception/segmentation/test_sam_2d_seg.py
new file mode 100644
index 0000000000..0e98c23ee9
--- /dev/null
+++ b/dimos/perception/segmentation/test_sam_2d_seg.py
@@ -0,0 +1,217 @@
+# Copyright 2025 Dimensional Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import time
+from dimos.stream import video_provider
+import pytest
+import cv2
+import numpy as np
+import reactivex as rx
+from reactivex import operators as ops
+from dimos.stream.video_provider import VideoProvider
+from dimos.perception.segmentation.sam_2d_seg import Sam2DSegmenter
+from dimos.perception.segmentation.utils import extract_masks_bboxes_probs_names
+
+
+class TestSam2DSegmenter:
+ @pytest.fixture(scope="class")
+ def video_path(self):
+ # Use a video file from assets directory
+ base_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../assets"))
+ video_file = "trimmed_video_office.mov"
+ return os.path.join(base_dir, video_file)
+
+ def test_sam_segmenter_initialization(self):
+ """Test FastSAM segmenter initializes correctly with default model path."""
+ try:
+ # Try to initialize with the default model path and existing device setting
+ segmenter = Sam2DSegmenter(device="cuda", use_analyzer=False)
+ assert segmenter is not None
+ assert segmenter.model is not None
+ except Exception as e:
+ # If the model file doesn't exist, the test should still pass with a warning
+ pytest.skip(f"Skipping test due to model initialization error: {e}")
+
+ def test_sam_segmenter_process_image(self, video_path):
+ """Test FastSAM segmenter can process video frames and return segmentation masks."""
+ try:
+ # Initialize segmenter without analyzer for faster testing
+ segmenter = Sam2DSegmenter(
+ device="cuda",
+ use_analyzer=False,
+ use_tracker=False, # Disable tracker for simpler testing
+ )
+
+ # Note: conf and iou are parameters for process_image, not constructor
+ # We'll monkey patch the process_image method to use lower thresholds
+ original_process_image = segmenter.process_image
+
+ def patched_process_image(image):
+ results = segmenter.model.track(
+ source=image,
+ device=segmenter.device,
+ retina_masks=True,
+ conf=0.1, # Lower confidence threshold for testing
+ iou=0.5, # Lower IoU threshold
+ persist=True,
+ verbose=False,
+ tracker=segmenter.tracker_config
+ if hasattr(segmenter, "tracker_config")
+ else None,
+ )
+
+ if len(results) > 0:
+ masks, bboxes, track_ids, probs, names, areas = (
+ extract_masks_bboxes_probs_names(results[0])
+ )
+ return masks, bboxes, track_ids, probs, names
+ return [], [], [], [], []
+
+ # Replace the method
+ segmenter.process_image = patched_process_image
+
+ # Create video provider and directly get a video stream observable
+ assert os.path.exists(video_path), f"Test video not found: {video_path}"
+ video_provider = VideoProvider(dev_name="test_video", video_source=video_path)
+
+ video_stream = video_provider.capture_video_as_observable(realtime=False, fps=1)
+
+ # Use ReactiveX operators to process the stream
+ def process_frame(frame):
+ try:
+ # Process frame with FastSAM
+ masks, bboxes, track_ids, probs, names = segmenter.process_image(frame)
+ print(
+ f"SAM results - masks: {len(masks)}, bboxes: {len(bboxes)}, track_ids: {len(track_ids)}, names: {len(names)}"
+ )
+
+ return {
+ "frame": frame,
+ "masks": masks,
+ "bboxes": bboxes,
+ "track_ids": track_ids,
+ "probs": probs,
+ "names": names,
+ }
+ except Exception as e:
+ print(f"Error in process_frame: {e}")
+ return {}
+
+ # Create the segmentation stream using pipe and map operator
+ segmentation_stream = video_stream.pipe(ops.map(process_frame))
+
+ # Collect results from the stream
+ results = []
+ frames_processed = 0
+ target_frames = 5
+
+ def on_next(result):
+ nonlocal frames_processed, results
+ if not result:
+ return
+
+ results.append(result)
+ frames_processed += 1
+
+ # Stop processing after target frames
+ if frames_processed >= target_frames:
+ subscription.dispose()
+
+ def on_error(error):
+ pytest.fail(f"Error in segmentation stream: {error}")
+
+ def on_completed():
+ pass
+
+ # Subscribe and wait for results
+ subscription = segmentation_stream.subscribe(
+ on_next=on_next, on_error=on_error, on_completed=on_completed
+ )
+
+ # Wait for frames to be processed
+ timeout = 30.0 # seconds
+ start_time = time.time()
+ while frames_processed < target_frames and time.time() - start_time < timeout:
+ time.sleep(0.5)
+
+ # Clean up subscription
+ subscription.dispose()
+ video_provider.dispose_all()
+
+ # Check if we have results
+ if len(results) == 0:
+ pytest.skip(
+ "No segmentation results found, but test connection established correctly"
+ )
+ return
+
+ print(f"Processed {len(results)} frames with segmentation results")
+
+ # Analyze the first result
+ result = results[0]
+
+ # Check that we have a frame
+ assert "frame" in result, "Result doesn't contain a frame"
+ assert isinstance(result["frame"], np.ndarray), "Frame is not a numpy array"
+
+ # Check that segmentation results are valid
+ assert isinstance(result["masks"], list)
+ assert isinstance(result["bboxes"], list)
+ assert isinstance(result["track_ids"], list)
+ assert isinstance(result["probs"], list)
+ assert isinstance(result["names"], list)
+
+ # All result lists should be the same length
+ assert (
+ len(result["masks"])
+ == len(result["bboxes"])
+ == len(result["track_ids"])
+ == len(result["probs"])
+ == len(result["names"])
+ )
+
+ # If we have masks, check that they have valid shape
+ if result.get("masks") and len(result["masks"]) > 0:
+ assert result["masks"][0].shape == (
+ result["frame"].shape[0],
+ result["frame"].shape[1],
+ ), "Mask shape should match image dimensions"
+ print(f"Found {len(result['masks'])} masks in first frame")
+ else:
+ print("No masks found in first frame, but test connection established correctly")
+
+ # Test visualization function
+ if result["masks"]:
+ vis_frame = segmenter.visualize_results(
+ result["frame"],
+ result["masks"],
+ result["bboxes"],
+ result["track_ids"],
+ result["probs"],
+ result["names"],
+ )
+ assert isinstance(vis_frame, np.ndarray), "Visualization output should be an image"
+ assert vis_frame.shape == result["frame"].shape, (
+ "Visualization should have same dimensions as input frame"
+ )
+
+ # We've already tested visualization above, so no need for a duplicate test
+
+ except Exception as e:
+ pytest.skip(f"Skipping test due to error: {e}")
+
+
+if __name__ == "__main__":
+ pytest.main(["-v", __file__])
diff --git a/pyproject.toml b/pyproject.toml
index 2f81fd62b1..116238aeb9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,10 +1,127 @@
+[build-system]
+requires = ["setuptools>=42", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools]
+packages = ["dimos"]
+
[project]
name = "dimos"
authors = [
- {name = "Stash Pomichter", email = "stash@dimensionalOS.com"},
+ {name = "Dimensional Team", email = "build@dimensionalOS.com"},
]
-version = "0.0.2"
+version = "0.0.4"
description = "Powering agentive generalist robotics"
+requires-python = ">=3.10"
+
+dependencies = [
+ # Core requirements
+ "opencv-python",
+ "python-dotenv",
+ "openai",
+ "anthropic>=0.19.0",
+ "cerebras-cloud-sdk",
+ "numpy>=1.26.4,<2.0.0",
+ "colorlog==6.9.0",
+ "yapf==0.40.2",
+ "typeguard",
+ "empy==3.3.4",
+ "catkin_pkg",
+ "lark",
+ "plum-dispatch==2.5.7",
+ "ffmpeg-python",
+ "pytest",
+ "tiktoken>=0.8.0",
+ "Flask>=2.2",
+ "python-multipart==0.0.20",
+ "reactivex",
+ "rxpy-backpressure @ git+https://github.com/dimensionalOS/rxpy-backpressure.git",
+ "pytest-asyncio==0.26.0",
+ "asyncio==3.4.3",
+ "go2-webrtc-connect @ git+https://github.com/legion1581/go2_webrtc_connect.git@fe64abb5987594e8c048427a98445799f6f6a9cc",
+
+ # Web Extensions
+ "fastapi>=0.115.6",
+ "sse-starlette>=2.2.1",
+ "uvicorn>=0.34.0",
+
+ # Agent Memory
+ "langchain-chroma>=0.1.4",
+ "langchain-openai>=0.2.14",
+
+ # Class Extraction
+ "pydantic",
+
+ # Developer Specific
+ "ipykernel",
+
+ # Unitree webrtc streaming
+ "aiortc==1.9.0",
+ "pycryptodome",
+ "sounddevice",
+ "pyaudio",
+ "requests",
+ "wasmtime",
+
+ # Audio
+ "openai-whisper",
+ "soundfile",
+
+ # Hugging Face
+ "transformers[torch]==4.49.0",
+
+ # Vector Embedding
+ "sentence_transformers",
+
+ # CTransforms GGUF
+ "ctransformers[cuda]==0.2.27",
+
+ # Perception Dependencies
+ "ultralytics>=8.3.70",
+ "filterpy>=1.4.5",
+ "scipy>=1.15.1",
+ "scikit-learn",
+ "Pillow",
+ "mmengine>=0.10.3",
+ "mmcv>=2.1.0",
+ "timm>=1.0.15",
+ "lap>=0.5.12",
+ "xformers==0.0.20",
+
+ # Detic
+ "mss",
+ "dataclasses",
+ "ftfy",
+ "regex",
+ "fasttext",
+ "lvis",
+ "nltk",
+ "clip @ git+https://github.com/openai/CLIP.git",
+ "detectron2 @ git+https://github.com/facebookresearch/detectron2.git@v0.6",
+
+ # Mapping
+ "open3d",
+]
+
+[project.optional-dependencies]
+manipulation = [
+
+ # Contact Graspnet Dependencies
+ "h5py>=3.7.0",
+ "pyrender>=0.1.45",
+ "trimesh>=3.22.0",
+ "python-fcl>=0.7.0.4",
+ "pyquaternion>=0.9.9",
+ "matplotlib>=3.7.1",
+ "rtree",
+ "pandas>=1.5.2",
+ "tqdm>=4.65.0",
+ "pyyaml>=6.0",
+
+ # Visualization (Optional)
+ "kaleido>=0.2.1",
+ "plotly>=5.9.0",
+]
[tool.ruff]
line-length = 100